ngram
listlengths
0
82k
[ "# Make sure to only set the hash after the", "central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]] for i in (0, 1) )", "None: return task_id.decode() task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type) #", "hits. canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if isinstance(task, Torsion1DTask):", "# Canonicalize the task to improve the cache hit rate.", "rate. task = _canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id = redis_connection.hget(\"qcgenerator:task-ids\",", "atom_index: (i + 1) for i, atom_index in enumerate(central_atom_indices) }", "worker.compute_optimization elif isinstance(task, HessianTask): compute = worker.compute_hessian else: raise NotImplementedError()", "the SMILES has a canonical ordering to help ensure cache", "canonical_smiles return task def cached_compute_task( task: Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection:", "OptimizationTask, Torsion1DTask) def _canonicalize_task(task: _T) -> _T: task = task.copy(deep=True)", "_T: task = task.copy(deep=True) # Ensure the SMILES has a", "j: i for i, j in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices =", "TypeVar, Union import redis from openff.toolkit.topology import Molecule from openff.bespokefit.executor.services.qcgenerator", "to help ensure cache hits. 
canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True)", "not None: return task_id.decode() task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type)", "allow_undefined_stereo=True) ) if isinstance(task, Torsion1DTask): map_to_atom_index = { j: i", "import TypeVar, Union import redis from openff.toolkit.topology import Molecule from", "= _canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id = redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if", ") canonical_molecule.properties[\"atom_map\"] = { atom_index: (i + 1) for i,", "-> _T: task = task.copy(deep=True) # Ensure the SMILES has", "= (1, 2) else: canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False", "= { j: i for i, j in canonical_molecule.properties[\"atom_map\"].items() }", "information is entered and subsequently discarded. redis_connection.hset(\"qcgenerator:task-ids\", task_hash, task_id) return", "task: Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection: redis.Redis, ) -> str: \"\"\"Checks", "if task_id is not None: return task_id.decode() task_id = compute.delay(task_json=task.json()).id", "is not None: return task_id.decode() task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id,", "after the type is set in case the connection #", "in (0, 1) ) canonical_molecule.properties[\"atom_map\"] = { atom_index: (i +", "canonical_order_atoms _T = TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask) def _canonicalize_task(task: _T)", "from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule import canonical_order_atoms", "j in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]] for i", 
"_canonicalize_task(task: _T) -> _T: task = task.copy(deep=True) # Ensure the", "return task def cached_compute_task( task: Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection: redis.Redis,", "canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]] for i in (0,", "else: raise NotImplementedError() # Canonicalize the task to improve the", "# Ensure the SMILES has a canonical ordering to help", "= worker.compute_optimization elif isinstance(task, HessianTask): compute = worker.compute_hessian else: raise", "redis from openff.toolkit.topology import Molecule from openff.bespokefit.executor.services.qcgenerator import worker from", "hash after the type is set in case the connection", "from typing import TypeVar, Union import redis from openff.toolkit.topology import", "not send it to a worker. \"\"\" if isinstance(task, Torsion1DTask):", "hit rate. task = _canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id =", "= canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if isinstance(task, Torsion1DTask): map_to_atom_index =", "{ atom_index: (i + 1) for i, atom_index in enumerate(central_atom_indices)", "Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if isinstance(task, Torsion1DTask): map_to_atom_index = { j:", "(1, 2) else: canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False )", "str: \"\"\"Checks to see if a QC task has already", "1) ) canonical_molecule.properties[\"atom_map\"] = { atom_index: (i + 1) for", "improve the cache hit rate. 
task = _canonicalize_task(task) task_hash =", "def _canonicalize_task(task: _T) -> _T: task = task.copy(deep=True) # Ensure", "task.central_bond = (1, 2) else: canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True,", "\"\"\"Checks to see if a QC task has already been", "openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule import canonical_order_atoms _T", "import HessianTask, OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule import canonical_order_atoms _T =", ") task.smiles = canonical_smiles return task def cached_compute_task( task: Union[HessianTask,", "_T = TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask) def _canonicalize_task(task: _T) ->", "task_id = redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id is not None: return", "compute = worker.compute_hessian else: raise NotImplementedError() # Canonicalize the task", "already been executed and if not send it to a", "canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True ) task.central_bond = (1, 2) else:", "= compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type) # Make sure to only", "before this information is entered and subsequently discarded. redis_connection.hset(\"qcgenerator:task-ids\", task_hash,", "map_to_atom_index = { j: i for i, j in canonical_molecule.properties[\"atom_map\"].items()", "i, atom_index in enumerate(central_atom_indices) } canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True,", "Canonicalize the task to improve the cache hit rate. 
task", "2) else: canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False ) task.smiles", "canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if isinstance(task, Torsion1DTask): map_to_atom_index", "isinstance(task, OptimizationTask): compute = worker.compute_optimization elif isinstance(task, HessianTask): compute =", "the hash after the type is set in case the", "see if a QC task has already been executed and", "TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask) def _canonicalize_task(task: _T) -> _T: task", "redis_connection: redis.Redis, ) -> str: \"\"\"Checks to see if a", "isinstance(task, Torsion1DTask): compute = worker.compute_torsion_drive elif isinstance(task, OptimizationTask): compute =", "i, j in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]] for", "-> str: \"\"\"Checks to see if a QC task has", "to see if a QC task has already been executed", "+ 1) for i, atom_index in enumerate(central_atom_indices) } canonical_smiles =", "hashlib.sha512(task.json().encode()).hexdigest() task_id = redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id is not None:", "Torsion1DTask) def _canonicalize_task(task: _T) -> _T: task = task.copy(deep=True) #", "def cached_compute_task( task: Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection: redis.Redis, ) ->", "} central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]] for i in (0, 1)", "import Molecule from openff.bespokefit.executor.services.qcgenerator import worker from openff.bespokefit.schema.tasks import HessianTask,", "task = task.copy(deep=True) # Ensure the SMILES has a canonical", "it to a worker. 
\"\"\" if isinstance(task, Torsion1DTask): compute =", "isinstance(task, Torsion1DTask): map_to_atom_index = { j: i for i, j", "atom_index in enumerate(central_atom_indices) } canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True", "from openff.bespokefit.executor.services.qcgenerator import worker from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask", "only set the hash after the type is set in", "task_id is not None: return task_id.decode() task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\",", "connection # goes down before this information is entered and", "= TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask) def _canonicalize_task(task: _T) -> _T:", "(i + 1) for i, atom_index in enumerate(central_atom_indices) } canonical_smiles", "and if not send it to a worker. \"\"\" if", "Torsion1DTask): compute = worker.compute_torsion_drive elif isinstance(task, OptimizationTask): compute = worker.compute_optimization", ") task.central_bond = (1, 2) else: canonical_smiles = canonical_molecule.to_smiles( isomeric=True,", "OptimizationTask, Torsion1DTask], redis_connection: redis.Redis, ) -> str: \"\"\"Checks to see", "compute = worker.compute_optimization elif isinstance(task, HessianTask): compute = worker.compute_hessian else:", "isomeric=True, explicit_hydrogens=True, mapped=True ) task.central_bond = (1, 2) else: canonical_smiles", "a worker. \"\"\" if isinstance(task, Torsion1DTask): compute = worker.compute_torsion_drive elif", "Torsion1DTask): map_to_atom_index = { j: i for i, j in", "compute = worker.compute_torsion_drive elif isinstance(task, OptimizationTask): compute = worker.compute_optimization elif", "cached_compute_task( task: Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection: redis.Redis, ) -> str:", "the task to improve the cache hit rate. 
task =", "compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type) # Make sure to only set", "= hashlib.sha512(task.json().encode()).hexdigest() task_id = redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id is not", "the cache hit rate. task = _canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest()", "ordering to help ensure cache hits. canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles,", "executed and if not send it to a worker. \"\"\"", "worker from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule import", "import redis from openff.toolkit.topology import Molecule from openff.bespokefit.executor.services.qcgenerator import worker", "if isinstance(task, Torsion1DTask): map_to_atom_index = { j: i for i,", "hashlib from typing import TypeVar, Union import redis from openff.toolkit.topology", "in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]] for i in", "= canonical_smiles return task def cached_compute_task( task: Union[HessianTask, OptimizationTask, Torsion1DTask],", "is set in case the connection # goes down before", "= redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id is not None: return task_id.decode()", "= canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False ) task.smiles = canonical_smiles return", "from openff.toolkit.topology import Molecule from openff.bespokefit.executor.services.qcgenerator import worker from openff.bespokefit.schema.tasks", "mapped=True ) task.central_bond = (1, 2) else: canonical_smiles = canonical_molecule.to_smiles(", "task_hash) if task_id is not None: return task_id.decode() task_id =", "sorted( map_to_atom_index[task.central_bond[i]] for i in (0, 1) ) canonical_molecule.properties[\"atom_map\"] =", "= 
worker.compute_torsion_drive elif isinstance(task, OptimizationTask): compute = worker.compute_optimization elif isinstance(task,", "explicit_hydrogens=True, mapped=True ) task.central_bond = (1, 2) else: canonical_smiles =", "enumerate(central_atom_indices) } canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True ) task.central_bond", "isinstance(task, HessianTask): compute = worker.compute_hessian else: raise NotImplementedError() # Canonicalize", "HessianTask): compute = worker.compute_hessian else: raise NotImplementedError() # Canonicalize the", "isomeric=True, explicit_hydrogens=True, mapped=False ) task.smiles = canonical_smiles return task def", "redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id is not None: return task_id.decode() task_id", "this information is entered and subsequently discarded. redis_connection.hset(\"qcgenerator:task-ids\", task_hash, task_id)", "} canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True ) task.central_bond =", "is entered and subsequently discarded. redis_connection.hset(\"qcgenerator:task-ids\", task_hash, task_id) return task_id", "= task.copy(deep=True) # Ensure the SMILES has a canonical ordering", "for i, j in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices = sorted( map_to_atom_index[task.central_bond[i]]", "send it to a worker. \"\"\" if isinstance(task, Torsion1DTask): compute", "help ensure cache hits. 
canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) )", "import worker from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule", "redis.Redis, ) -> str: \"\"\"Checks to see if a QC", "typing import TypeVar, Union import redis from openff.toolkit.topology import Molecule", "task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type) # Make sure to", "map_to_atom_index[task.central_bond[i]] for i in (0, 1) ) canonical_molecule.properties[\"atom_map\"] = {", "sure to only set the hash after the type is", "ensure cache hits. canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if", ") if isinstance(task, Torsion1DTask): map_to_atom_index = { j: i for", "worker.compute_torsion_drive elif isinstance(task, OptimizationTask): compute = worker.compute_optimization elif isinstance(task, HessianTask):", "canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False ) task.smiles = canonical_smiles", "task has already been executed and if not send it", "the connection # goes down before this information is entered", "task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id = redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id is", "canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True ) task.central_bond = (1,", "openff.toolkit.topology import Molecule from openff.bespokefit.executor.services.qcgenerator import worker from openff.bespokefit.schema.tasks import", "= worker.compute_hessian else: raise NotImplementedError() # Canonicalize the task to", "raise NotImplementedError() # Canonicalize the task to improve the cache", "1) for i, atom_index in enumerate(central_atom_indices) } canonical_smiles = 
canonical_molecule.to_smiles(", "canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if isinstance(task, Torsion1DTask): map_to_atom_index = {", "Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection: redis.Redis, ) -> str: \"\"\"Checks to", "task to improve the cache hit rate. task = _canonicalize_task(task)", "to a worker. \"\"\" if isinstance(task, Torsion1DTask): compute = worker.compute_torsion_drive", "type is set in case the connection # goes down", "down before this information is entered and subsequently discarded. redis_connection.hset(\"qcgenerator:task-ids\",", "elif isinstance(task, OptimizationTask): compute = worker.compute_optimization elif isinstance(task, HessianTask): compute", "in enumerate(central_atom_indices) } canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True )", "= { atom_index: (i + 1) for i, atom_index in", "_T) -> _T: task = task.copy(deep=True) # Ensure the SMILES", "# goes down before this information is entered and subsequently", "import hashlib from typing import TypeVar, Union import redis from", "elif isinstance(task, HessianTask): compute = worker.compute_hessian else: raise NotImplementedError() #", "redis_connection.hset(\"qcgenerator:types\", task_id, task.type) # Make sure to only set the", ") -> str: \"\"\"Checks to see if a QC task", "NotImplementedError() # Canonicalize the task to improve the cache hit", "Make sure to only set the hash after the type", "case the connection # goes down before this information is", "Union import redis from openff.toolkit.topology import Molecule from openff.bespokefit.executor.services.qcgenerator import", "return task_id.decode() task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type) # Make", "_canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id = 
redis_connection.hget(\"qcgenerator:task-ids\", task_hash) if task_id", "task_id.decode() task_id = compute.delay(task_json=task.json()).id redis_connection.hset(\"qcgenerator:types\", task_id, task.type) # Make sure", "task.type) # Make sure to only set the hash after", "if a QC task has already been executed and if", "set in case the connection # goes down before this", "for i in (0, 1) ) canonical_molecule.properties[\"atom_map\"] = { atom_index:", "has a canonical ordering to help ensure cache hits. canonical_molecule", "canonical_molecule.properties[\"atom_map\"] = { atom_index: (i + 1) for i, atom_index", "has already been executed and if not send it to", "if isinstance(task, Torsion1DTask): compute = worker.compute_torsion_drive elif isinstance(task, OptimizationTask): compute", "cache hits. canonical_molecule = canonical_order_atoms( Molecule.from_smiles(task.smiles, allow_undefined_stereo=True) ) if isinstance(task,", "task_id, task.type) # Make sure to only set the hash", "for i, atom_index in enumerate(central_atom_indices) } canonical_smiles = canonical_molecule.to_smiles( isomeric=True,", "cache hit rate. task = _canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id", "the type is set in case the connection # goes", "OptimizationTask): compute = worker.compute_optimization elif isinstance(task, HessianTask): compute = worker.compute_hessian", "OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule import canonical_order_atoms _T = TypeVar(\"_T\", HessianTask,", "Molecule from openff.bespokefit.executor.services.qcgenerator import worker from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask,", "\"\"\" if isinstance(task, Torsion1DTask): compute = worker.compute_torsion_drive elif isinstance(task, OptimizationTask):", "if not send it to a worker. 
\"\"\" if isinstance(task,", "to only set the hash after the type is set", "else: canonical_smiles = canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False ) task.smiles =", "task def cached_compute_task( task: Union[HessianTask, OptimizationTask, Torsion1DTask], redis_connection: redis.Redis, )", "canonical ordering to help ensure cache hits. canonical_molecule = canonical_order_atoms(", "in case the connection # goes down before this information", "goes down before this information is entered and subsequently discarded.", "task = _canonicalize_task(task) task_hash = hashlib.sha512(task.json().encode()).hexdigest() task_id = redis_connection.hget(\"qcgenerator:task-ids\", task_hash)", "openff.bespokefit.executor.services.qcgenerator import worker from openff.bespokefit.schema.tasks import HessianTask, OptimizationTask, Torsion1DTask from", "Torsion1DTask from openff.bespokefit.utilities.molecule import canonical_order_atoms _T = TypeVar(\"_T\", HessianTask, OptimizationTask,", "HessianTask, OptimizationTask, Torsion1DTask from openff.bespokefit.utilities.molecule import canonical_order_atoms _T = TypeVar(\"_T\",", "set the hash after the type is set in case", "to improve the cache hit rate. 
task = _canonicalize_task(task) task_hash", "Torsion1DTask], redis_connection: redis.Redis, ) -> str: \"\"\"Checks to see if", "openff.bespokefit.utilities.molecule import canonical_order_atoms _T = TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask) def", "Ensure the SMILES has a canonical ordering to help ensure", "i in (0, 1) ) canonical_molecule.properties[\"atom_map\"] = { atom_index: (i", "QC task has already been executed and if not send", "canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=False ) task.smiles = canonical_smiles return task", "{ j: i for i, j in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices", "worker.compute_hessian else: raise NotImplementedError() # Canonicalize the task to improve", "HessianTask, OptimizationTask, Torsion1DTask) def _canonicalize_task(task: _T) -> _T: task =", "from openff.bespokefit.utilities.molecule import canonical_order_atoms _T = TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask)", "(0, 1) ) canonical_molecule.properties[\"atom_map\"] = { atom_index: (i + 1)", "= sorted( map_to_atom_index[task.central_bond[i]] for i in (0, 1) ) canonical_molecule.properties[\"atom_map\"]", "worker. \"\"\" if isinstance(task, Torsion1DTask): compute = worker.compute_torsion_drive elif isinstance(task,", "i for i, j in canonical_molecule.properties[\"atom_map\"].items() } central_atom_indices = sorted(", "task.smiles = canonical_smiles return task def cached_compute_task( task: Union[HessianTask, OptimizationTask,", "SMILES has a canonical ordering to help ensure cache hits.", "been executed and if not send it to a worker.", "a canonical ordering to help ensure cache hits. 
canonical_molecule =", "explicit_hydrogens=True, mapped=False ) task.smiles = canonical_smiles return task def cached_compute_task(", "import canonical_order_atoms _T = TypeVar(\"_T\", HessianTask, OptimizationTask, Torsion1DTask) def _canonicalize_task(task:", "= canonical_molecule.to_smiles( isomeric=True, explicit_hydrogens=True, mapped=True ) task.central_bond = (1, 2)", "a QC task has already been executed and if not", "mapped=False ) task.smiles = canonical_smiles return task def cached_compute_task( task:", "task.copy(deep=True) # Ensure the SMILES has a canonical ordering to" ]
[ ":param str state_start: Verb to describe operation start :param str", "to execute :param str state_start: Verb to describe operation start", "seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init", ":param str operation: Node (lifecycle) operation to execute :param str", "if OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C in instance.node.operations:", "seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting', 'Started')) # Create", "will not be built if the instance provided does not", "operation start :param str state_stop: Verb to describe operation finish", "finish ''' tasks = list() # Only build the sequence", "if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node \"%s\"", "from cloudify.plugins import lifecycle OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop'", "lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node instance ' '\"%s\"", "All Rights Reserved''' from cloudify.plugins import lifecycle OP_START = 'hacker.interfaces.lifecycle.start'", "because it is not a compute host' % instance.id) continue", "build_instance_subgraph(instance, graph): ''' Builds a subgraph for an instance :param", "`CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks against :param str", "graph: Task graph to create sequences from ''' # Init", "the instance provided does not have a node with an", "OP_SS_C)) # Init a \"start instance\" subgraph sg_start = graph.subgraph('stop_subgraph')", "built if the instance provided does not have a node", "The sequence will not be built if the instance provided", "subgraph tasks for an instance .. 
note:: The sequence will", "= 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START, OP_SS_C, OP_SS_D,", "set([OP_START, OP_SS_C, OP_SS_D, OP_STOP]) def build_instance_sequence(instance, operation, state_start=None, state_end=None): '''", "tasks.append(instance.send_event('%s host' % state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks def build_instance_subgraph(instance, graph):", "create sequences from ''' # Init a \"stop instance\" subgraph", "ended state if state_end: tasks.append(instance.send_event('%s host' % state_end)) tasks.append(instance.set_state(state_end.lower())) return", "# Only build the sequence if the node operation exists", "OP_START, 'Starting', 'Started')) # Create subgraph dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start,", "compute hosts and build a sequence graph for node in", ":param `TaskDependencyGraph` graph: Task graph to create sequences from '''", "% instance.id) continue build_instance_subgraph(instance, graph) # Execute the sequences return", "instance in node.instances: if not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots workflow", "instance to execute tasks against :param str operation: Node (lifecycle)", "exists if operation not in instance.node.operations: return tasks # Add", "node.instances: if not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node", "workflow for node instance ' '\"%s\" because it is not", "and start the instances back up when complete. 
''' graph", "' '\"%s\" because it is not a compute host' %", "all compute hosts and build a sequence graph for node", "lifecycle OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots'", "graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence() if OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D))", "operation finish ''' tasks = list() # Only build the", "stop all host (compute) instances, delete all existing instance snapshots,", "if not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node instance", "'Stopping', 'Stopped')) # Init a \"recreate snapshots\" subgraph sg_snap =", "host' % state_start)) tasks.append(instance.set_state(state_start.lower())) # Add task operation tasks.append(instance.execute_operation(operation)) #", "Init a \"stop instance\" subgraph sg_stop = graph.subgraph('stop_subgraph') seq_stop =", "not have all required operations defined' % node.id) continue #", "volumes, and start the instances back up when complete. 
'''", "REQUIRED_OPS = set([OP_START, OP_SS_C, OP_SS_D, OP_STOP]) def build_instance_sequence(instance, operation, state_start=None,", "state_start)) tasks.append(instance.set_state(state_start.lower())) # Add task operation tasks.append(instance.execute_operation(operation)) # Add task", "# Init a \"recreate snapshots\" subgraph sg_snap = graph.subgraph('snapshot_subgraph') seq_snap", "refresh_snapshots(ctx, **_): ''' Executes a complex, graph-based set of lifecycle", "OP_SS_C, OP_SS_D, OP_STOP]) def build_instance_sequence(instance, operation, state_start=None, state_end=None): ''' Builds", "\"%s\" because ' 'it does not have all required operations", "'it does not have all required operations defined' % node.id)", "subgraph dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx, **_): '''", "graph to create sequences from ''' # Init a \"stop", "seq_snap = sg_snap.sequence() if OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if", "seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting', 'Started')) # Create subgraph dependencies graph.add_dependency(sg_snap,", "Add task operation tasks.append(instance.execute_operation(operation)) # Add task ended state if", "the sequence if the node operation exists if operation not", "in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C))", "an operation defined in the operation parameter. 
:param `CloudifyWorkflowNodeInstance` instance:", "Gigaspaces, 2017, All Rights Reserved''' from cloudify.plugins import lifecycle OP_START", "against :param `TaskDependencyGraph` graph: Task graph to create sequences from", "'Starting', 'Started')) # Create subgraph dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap)", "execute tasks against :param str operation: Node (lifecycle) operation to", "host' % instance.id) continue build_instance_subgraph(instance, graph) # Execute the sequences", "if OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init a \"start", "all host (compute) instances, delete all existing instance snapshots, take", "compute host' % instance.id) continue build_instance_subgraph(instance, graph) # Execute the", "node \"%s\" because ' 'it does not have all required", "task ended state if state_end: tasks.append(instance.send_event('%s host' % state_end)) tasks.append(instance.set_state(state_end.lower()))", "= sg_snap.sequence() if OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C", "instance to execute tasks against :param `TaskDependencyGraph` graph: Task graph", "for an instance .. 
note:: The sequence will not be", "does not have all required operations defined' % node.id) continue", "OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS", "snapshots of all attached volumes, and start the instances back", "node instance ' '\"%s\" because it is not a compute", "and build a sequence graph for node in ctx.nodes: if", "return tasks # Add task starting state if state_start: tasks.append(instance.send_event('%s", "# Add task operation tasks.append(instance.execute_operation(operation)) # Add task ended state", "to execute tasks against :param str operation: Node (lifecycle) operation", "Add task ended state if state_end: tasks.append(instance.send_event('%s host' % state_end))", "'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START, OP_SS_C, OP_SS_D, OP_STOP])", "tasks.append(instance.send_event('%s host' % state_start)) tasks.append(instance.set_state(state_start.lower())) # Add task operation tasks.append(instance.execute_operation(operation))", "sg_snap) def refresh_snapshots(ctx, **_): ''' Executes a complex, graph-based set", "`CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks against :param `TaskDependencyGraph`", "= 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START, OP_SS_C, OP_SS_D, OP_STOP]) def build_instance_sequence(instance,", "# Create subgraph dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx,", "in the operation parameter. 
:param `CloudifyWorkflowNodeInstance` instance: Node instance to", "Node (lifecycle) operation to execute :param str state_start: Verb to", "node.id) continue # Iterate over each node instance for instance", "Iterate over each node instance for instance in node.instances: if", "build a sequence graph for node in ctx.nodes: if not", "new snapshots of all attached volumes, and start the instances", "state if state_end: tasks.append(instance.send_event('%s host' % state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks", "sequence graph for node in ctx.nodes: if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn(", "state_start=None, state_end=None): ''' Builds sequenced subgraph tasks for an instance", "subgraph sg_snap = graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence() if OP_SS_D in", "**_): ''' Executes a complex, graph-based set of lifecycle events", "sequence if the node operation exists if operation not in", "state_start: Verb to describe operation start :param str state_stop: Verb", "an instance .. note:: The sequence will not be built", "state_stop: Verb to describe operation finish ''' tasks = list()", "''' Executes a complex, graph-based set of lifecycle events to", "(lifecycle) operation to execute :param str state_start: Verb to describe", "node with an operation defined in the operation parameter. :param", "instances, delete all existing instance snapshots, take new snapshots of", "state_end=None): ''' Builds sequenced subgraph tasks for an instance ..", "REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node \"%s\" because '", "with an operation defined in the operation parameter. :param `CloudifyWorkflowNodeInstance`", "operation defined in the operation parameter. 
:param `CloudifyWorkflowNodeInstance` instance: Node", "tasks.append(instance.set_state(state_start.lower())) # Add task operation tasks.append(instance.execute_operation(operation)) # Add task ended", "subgraph sg_stop = graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP,", "= graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping', 'Stopped'))", "operation exists if operation not in instance.node.operations: return tasks #", "instance.node.operations: return tasks # Add task starting state if state_start:", ":param str state_stop: Verb to describe operation finish ''' tasks", "instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init a \"start instance\" subgraph sg_start", "node in ctx.nodes: if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots workflow", "graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx, **_): ''' Executes a complex, graph-based", "'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START, OP_SS_C, OP_SS_D, OP_STOP]) def build_instance_sequence(instance, operation,", "describe operation start :param str state_stop: Verb to describe operation", "# Iterate over each node instance for instance in node.instances:", "`TaskDependencyGraph` graph: Task graph to create sequences from ''' #", "a \"start instance\" subgraph sg_start = graph.subgraph('stop_subgraph') seq_start = sg_start.sequence()", "workflow for node \"%s\" because ' 'it does not have", "graph): ''' Builds a subgraph for an instance :param `CloudifyWorkflowNodeInstance`", "# Init a \"stop instance\" subgraph sg_stop = graph.subgraph('stop_subgraph') seq_stop", "''' Builds a subgraph for an instance :param `CloudifyWorkflowNodeInstance` instance:", "tasks def build_instance_subgraph(instance, graph): ''' Builds a subgraph for an", "operation, 
state_start=None, state_end=None): ''' Builds sequenced subgraph tasks for an", "not be built if the instance provided does not have", "if operation not in instance.node.operations: return tasks # Add task", "of all attached volumes, and start the instances back up", "Find all compute hosts and build a sequence graph for", "import lifecycle OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C =", "' 'it does not have all required operations defined' %", "graph.subgraph('stop_subgraph') seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting', 'Started')) #", "back up when complete. ''' graph = ctx.graph_mode() # Find", "provided does not have a node with an operation defined", "all attached volumes, and start the instances back up when", "return tasks def build_instance_subgraph(instance, graph): ''' Builds a subgraph for", "sg_snap.sequence() if OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C in", "sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting', 'Started')) # Create subgraph dependencies", "host (compute) instances, delete all existing instance snapshots, take new", "be built if the instance provided does not have a", "'''Copyright Gigaspaces, 2017, All Rights Reserved''' from cloudify.plugins import lifecycle", "# Init a \"start instance\" subgraph sg_start = graph.subgraph('stop_subgraph') seq_start", "state if state_start: tasks.append(instance.send_event('%s host' % state_start)) tasks.append(instance.set_state(state_start.lower())) # Add", "Builds sequenced subgraph tasks for an instance .. 
note:: The", "Executes a complex, graph-based set of lifecycle events to stop", "seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping', 'Stopped')) # Init a \"recreate snapshots\"", "Only build the sequence if the node operation exists if", "% state_start)) tasks.append(instance.set_state(state_start.lower())) # Add task operation tasks.append(instance.execute_operation(operation)) # Add", "OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START, OP_SS_C, OP_SS_D, OP_STOP]) def", "'Skipping refresh_snapshots workflow for node instance ' '\"%s\" because it", "# Add task starting state if state_start: tasks.append(instance.send_event('%s host' %", "str state_stop: Verb to describe operation finish ''' tasks =", "ctx.logger.warn( 'Skipping refresh_snapshots workflow for node \"%s\" because ' 'it", "up when complete. ''' graph = ctx.graph_mode() # Find all", "instance\" subgraph sg_start = graph.subgraph('stop_subgraph') seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence( instance,", "a subgraph for an instance :param `CloudifyWorkflowNodeInstance` instance: Node instance", "Init a \"recreate snapshots\" subgraph sg_snap = graph.subgraph('snapshot_subgraph') seq_snap =", "to describe operation finish ''' tasks = list() # Only", "OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init a \"start instance\"", "% node.id) continue # Iterate over each node instance for", "def build_instance_sequence(instance, operation, state_start=None, state_end=None): ''' Builds sequenced subgraph tasks", "lifecycle events to stop all host (compute) instances, delete all", "instance provided does not have a node with an operation", "state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks def build_instance_subgraph(instance, graph): ''' Builds a", "operation tasks.append(instance.execute_operation(operation)) # Add task ended state if state_end: 
tasks.append(instance.send_event('%s", "operations defined' % node.id) continue # Iterate over each node", "in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init a \"start instance\" subgraph", "Add task starting state if state_start: tasks.append(instance.send_event('%s host' % state_start))", "sequences from ''' # Init a \"stop instance\" subgraph sg_stop", "not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node instance '", "in ctx.nodes: if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots workflow for", "graph = ctx.graph_mode() # Find all compute hosts and build", "str operation: Node (lifecycle) operation to execute :param str state_start:", "tasks against :param `TaskDependencyGraph` graph: Task graph to create sequences", "for node instance ' '\"%s\" because it is not a", "delete all existing instance snapshots, take new snapshots of all", "OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START, OP_SS_C,", "= graph.subgraph('stop_subgraph') seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting', 'Started'))", "start the instances back up when complete. ''' graph =", "start :param str state_stop: Verb to describe operation finish '''", "sg_stop) graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx, **_): ''' Executes a complex,", "sequenced subgraph tasks for an instance .. note:: The sequence", "instance ' '\"%s\" because it is not a compute host'", "tasks for an instance .. 
note:: The sequence will not", "state_start: tasks.append(instance.send_event('%s host' % state_start)) tasks.append(instance.set_state(state_start.lower())) # Add task operation", "sequence will not be built if the instance provided does", "graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping', 'Stopped')) #", "a node with an operation defined in the operation parameter.", "instance snapshots, take new snapshots of all attached volumes, and", "not have a node with an operation defined in the", "for node in ctx.nodes: if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots", "host' % state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks def build_instance_subgraph(instance, graph): '''", "list() # Only build the sequence if the node operation", "ctx.graph_mode() # Find all compute hosts and build a sequence", "instance.id) continue build_instance_subgraph(instance, graph) # Execute the sequences return graph.execute()", "subgraph sg_start = graph.subgraph('stop_subgraph') seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START,", "to create sequences from ''' # Init a \"stop instance\"", "note:: The sequence will not be built if the instance", "instance for instance in node.instances: if not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping", "cloudify.plugins import lifecycle OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C", "seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init a \"start instance\" subgraph sg_start =", "all required operations defined' % node.id) continue # Iterate over", "snapshots\" subgraph sg_snap = graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence() if OP_SS_D", "Builds a subgraph for an instance :param `CloudifyWorkflowNodeInstance` instance: Node", "for instance in node.instances: if 
not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots", "'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS = set([OP_START,", "'Skipping refresh_snapshots workflow for node \"%s\" because ' 'it does", "operation: Node (lifecycle) operation to execute :param str state_start: Verb", "tasks.append(instance.execute_operation(operation)) # Add task ended state if state_end: tasks.append(instance.send_event('%s host'", "if the instance provided does not have a node with", "the node operation exists if operation not in instance.node.operations: return", "instance .. note:: The sequence will not be built if", "= 'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots' REQUIRED_OPS =", "execute :param str state_start: Verb to describe operation start :param", "execute tasks against :param `TaskDependencyGraph` graph: Task graph to create", "def refresh_snapshots(ctx, **_): ''' Executes a complex, graph-based set of", "= sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping', 'Stopped')) # Init a", "'\"%s\" because it is not a compute host' % instance.id)", "Reserved''' from cloudify.plugins import lifecycle OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP =", "''' tasks = list() # Only build the sequence if", "sg_stop = graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping',", "the operation parameter. 
:param `CloudifyWorkflowNodeInstance` instance: Node instance to execute", "tasks.append(instance.set_state(state_end.lower())) return tasks def build_instance_subgraph(instance, graph): ''' Builds a subgraph", "dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx, **_): ''' Executes", "= 'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D =", "for node \"%s\" because ' 'it does not have all", "if state_start: tasks.append(instance.send_event('%s host' % state_start)) tasks.append(instance.set_state(state_start.lower())) # Add task", "''' Builds sequenced subgraph tasks for an instance .. note::", "Node instance to execute tasks against :param `TaskDependencyGraph` graph: Task", "instance :param `CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks against", "have a node with an operation defined in the operation", ":param `CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks against :param", "when complete. 
''' graph = ctx.graph_mode() # Find all compute", "= sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting', 'Started')) # Create subgraph", "all existing instance snapshots, take new snapshots of all attached", "refresh_snapshots workflow for node instance ' '\"%s\" because it is", "\"start instance\" subgraph sg_start = graph.subgraph('stop_subgraph') seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence(", "to stop all host (compute) instances, delete all existing instance", "Node instance to execute tasks against :param str operation: Node", "Create subgraph dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx, **_):", "= set([OP_START, OP_SS_C, OP_SS_D, OP_STOP]) def build_instance_sequence(instance, operation, state_start=None, state_end=None):", "it is not a compute host' % instance.id) continue build_instance_subgraph(instance,", "does not have a node with an operation defined in", "\"recreate snapshots\" subgraph sg_snap = graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence() if", "because ' 'it does not have all required operations defined'", "graph-based set of lifecycle events to stop all host (compute)", "continue # Iterate over each node instance for instance in", "2017, All Rights Reserved''' from cloudify.plugins import lifecycle OP_START =", "task starting state if state_start: tasks.append(instance.send_event('%s host' % state_start)) tasks.append(instance.set_state(state_start.lower()))", "in instance.node.operations: return tasks # Add task starting state if", "a sequence graph for node in ctx.nodes: if not REQUIRED_OPS.issubset(node.operations):", "ctx.nodes: if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node", "state_end: tasks.append(instance.send_event('%s host' % state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks def 
build_instance_subgraph(instance,", "Verb to describe operation start :param str state_stop: Verb to", "Task graph to create sequences from ''' # Init a", ".. note:: The sequence will not be built if the", "'Started')) # Create subgraph dependencies graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap) def", "sg_start = graph.subgraph('stop_subgraph') seq_start = sg_start.sequence() seq_start.add(*build_instance_sequence( instance, OP_START, 'Starting',", "instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) #", "to describe operation start :param str state_stop: Verb to describe", "is not a compute host' % instance.id) continue build_instance_subgraph(instance, graph)", "# Add task ended state if state_end: tasks.append(instance.send_event('%s host' %", "refresh_snapshots workflow for node \"%s\" because ' 'it does not", "a \"stop instance\" subgraph sg_stop = graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence()", "graph.add_dependency(sg_snap, sg_stop) graph.add_dependency(sg_start, sg_snap) def refresh_snapshots(ctx, **_): ''' Executes a", "(compute) instances, delete all existing instance snapshots, take new snapshots", "Verb to describe operation finish ''' tasks = list() #", "defined in the operation parameter. :param `CloudifyWorkflowNodeInstance` instance: Node instance", "to execute tasks against :param `TaskDependencyGraph` graph: Task graph to", "if state_end: tasks.append(instance.send_event('%s host' % state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks def", "sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping', 'Stopped')) # Init a \"recreate", "complete. 
''' graph = ctx.graph_mode() # Find all compute hosts", "if the node operation exists if operation not in instance.node.operations:", "sg_snap = graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence() if OP_SS_D in instance.node.operations:", "complex, graph-based set of lifecycle events to stop all host", "a \"recreate snapshots\" subgraph sg_snap = graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence()", "str state_start: Verb to describe operation start :param str state_stop:", "OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_D)) if OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance,", "starting state if state_start: tasks.append(instance.send_event('%s host' % state_start)) tasks.append(instance.set_state(state_start.lower())) #", "over each node instance for instance in node.instances: if not", "take new snapshots of all attached volumes, and start the", "\"stop instance\" subgraph sg_stop = graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence(", "'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D = 'hacker.interfaces.lifecycle.delete_snapshots'", "''' # Init a \"stop instance\" subgraph sg_stop = graph.subgraph('stop_subgraph')", "Rights Reserved''' from cloudify.plugins import lifecycle OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP", "= graph.subgraph('snapshot_subgraph') seq_snap = sg_snap.sequence() if OP_SS_D in instance.node.operations: seq_snap.add(*build_instance_sequence(instance,", "OP_SS_D)) if OP_SS_C in instance.node.operations: seq_snap.add(*build_instance_sequence(instance, OP_SS_C)) # Init a", "instance: Node instance to execute tasks against :param str operation:", "def build_instance_subgraph(instance, graph): ''' Builds a subgraph for an instance", "existing instance snapshots, take new snapshots 
of all attached volumes,", "a complex, graph-based set of lifecycle events to stop all", "describe operation finish ''' tasks = list() # Only build", "instance, OP_START, 'Starting', 'Started')) # Create subgraph dependencies graph.add_dependency(sg_snap, sg_stop)", "have all required operations defined' % node.id) continue # Iterate", "= list() # Only build the sequence if the node", "instance, OP_STOP, 'Stopping', 'Stopped')) # Init a \"recreate snapshots\" subgraph", "task operation tasks.append(instance.execute_operation(operation)) # Add task ended state if state_end:", "of lifecycle events to stop all host (compute) instances, delete", "instances back up when complete. ''' graph = ctx.graph_mode() #", "'Stopped')) # Init a \"recreate snapshots\" subgraph sg_snap = graph.subgraph('snapshot_subgraph')", "set of lifecycle events to stop all host (compute) instances,", "for an instance :param `CloudifyWorkflowNodeInstance` instance: Node instance to execute", "build_instance_sequence(instance, operation, state_start=None, state_end=None): ''' Builds sequenced subgraph tasks for", "events to stop all host (compute) instances, delete all existing", "tasks = list() # Only build the sequence if the", "graph for node in ctx.nodes: if not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping", "OP_STOP]) def build_instance_sequence(instance, operation, state_start=None, state_end=None): ''' Builds sequenced subgraph", "a compute host' % instance.id) continue build_instance_subgraph(instance, graph) # Execute", "subgraph for an instance :param `CloudifyWorkflowNodeInstance` instance: Node instance to", "''' graph = ctx.graph_mode() # Find all compute hosts and", "snapshots, take new snapshots of all attached volumes, and start", "in node.instances: if not lifecycle.is_host_node(instance): ctx.logger.warn( 'Skipping refresh_snapshots workflow for", "Init a \"start instance\" subgraph sg_start = graph.subgraph('stop_subgraph') seq_start =", "OP_SS_D, 
OP_STOP]) def build_instance_sequence(instance, operation, state_start=None, state_end=None): ''' Builds sequenced", "against :param str operation: Node (lifecycle) operation to execute :param", "defined' % node.id) continue # Iterate over each node instance", "operation parameter. :param `CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks", "OP_START = 'hacker.interfaces.lifecycle.start' OP_STOP = 'hacker.interfaces.lifecycle.stop' OP_SS_C = 'hacker.interfaces.lifecycle.create_snapshots' OP_SS_D", "an instance :param `CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks", "instance: Node instance to execute tasks against :param `TaskDependencyGraph` graph:", "each node instance for instance in node.instances: if not lifecycle.is_host_node(instance):", "hosts and build a sequence graph for node in ctx.nodes:", "ctx.logger.warn( 'Skipping refresh_snapshots workflow for node instance ' '\"%s\" because", "not in instance.node.operations: return tasks # Add task starting state", "parameter. :param `CloudifyWorkflowNodeInstance` instance: Node instance to execute tasks against", "the instances back up when complete. 
''' graph = ctx.graph_mode()", "node instance for instance in node.instances: if not lifecycle.is_host_node(instance): ctx.logger.warn(", "operation to execute :param str state_start: Verb to describe operation", "seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance, OP_STOP, 'Stopping', 'Stopped')) # Init", "= ctx.graph_mode() # Find all compute hosts and build a", "build the sequence if the node operation exists if operation", "OP_STOP, 'Stopping', 'Stopped')) # Init a \"recreate snapshots\" subgraph sg_snap", "from ''' # Init a \"stop instance\" subgraph sg_stop =", "# Find all compute hosts and build a sequence graph", "operation not in instance.node.operations: return tasks # Add task starting", "% state_end)) tasks.append(instance.set_state(state_end.lower())) return tasks def build_instance_subgraph(instance, graph): ''' Builds", "tasks against :param str operation: Node (lifecycle) operation to execute", "required operations defined' % node.id) continue # Iterate over each", "node operation exists if operation not in instance.node.operations: return tasks", "not a compute host' % instance.id) continue build_instance_subgraph(instance, graph) #", "tasks # Add task starting state if state_start: tasks.append(instance.send_event('%s host'", "not REQUIRED_OPS.issubset(node.operations): ctx.logger.warn( 'Skipping refresh_snapshots workflow for node \"%s\" because", "attached volumes, and start the instances back up when complete.", "instance\" subgraph sg_stop = graph.subgraph('stop_subgraph') seq_stop = sg_stop.sequence() seq_stop.add(*build_instance_sequence( instance," ]
[ "# Wait until sending file while not self._sending_file: pass #", "with parameters\") def threadfunc(worker_num, fpath): self._workers_active += 1 try: recver_socket", "return printerror(\"Couldn't access file due to permission error\") except timeout:", "path of the storage dir for received files. If storage", "by using multiple workers. \"\"\" if not self.param_set: return printerror(\"Sender", "FlyterSender: \"\"\" Handles Flyter file sending processes. Note: Sends to", "storage directory\") except error: self._progress_bar.stop() return printerror(\"Error with socket\") except:", "self._progress_bar.add_progress(len(packet)) fs -= len(packet) sender_socket.send(b'\\x06') # ACK except KeyboardInterrupt: self._progress_bar.stop()", "substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS", "else: return storage_dirname DEFAULT_PACKET_SIZE = 512 def __init__(self, host_ip, main_port,", "sender on the same network. Parameters ---------- host_ip_address : str", "\\ ntohs, ntohl, htons, htonl, \\ gethostname, \\ AF_INET, SOCK_STREAM", "---------- filepath : str The filepath of the file to", "join from random import randint from secrets import token_bytes from", "self.current_val def stop(self): \"\"\"Stop the progress bar.\"\"\" self.stopped = True", "was unsuccessful\") else: # Wait for progress bar while not", "stored. 
\"\"\" app_dirname = dirname(__file__) appfiles_dirname = join(app_dirname, 'Flyter') if", "in self.worker_ports] wp = b''.join(wp) headers = b''.join([len_hn, hn, tok,", "return printerror(\"Error while receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is", "printerror(\"User aborted operation\") self._recving_file = False try: # Build the", "({ProgressBar.byte_rescale(fsize)})\") # Progress bar thread self._progress_bar = ProgressBar(fsize, 40) self._progress_bar.start()", "self._workers_active = 0 self._progress_bar = ProgressBar(None) try: self.socket = socket(AF_INET,", "output.write(packet) # Clear the contents of the temp file open(wpath,", ").start() except FileNotFoundError: return printerror(\"Couldn't access file\") except PermissionError: return", "error: return printerror(\"Error with sockets\") except: self._sender_socket.send(b'\\x15') # NAK return", "{eta}\", end=\"\\r\") # Flyter Classes class FlyterSender: \"\"\" Handles Flyter", "for w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" )", "= int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port) for port in self.worker_ports] wp", "port.\"\"\" while True: port = randint(10_000, 65536) with socket(AF_INET, SOCK_STREAM)", "\" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA:", "while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\") return res", "of the file to be sent. \"\"\" if not self.param_set:", "= False self._workers_active = 0 self._progress_bar = ProgressBar(None) try: self.socket", "bar. length : :obj:`int`, optional The length of the progress", "multiple workers. 
Speeds up transmission rate by using multiple workers.", "timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except: self._progress_bar.stop() return printerror(f\"Error", "time from warnings import warn from sys import argv, exit,", "self.current_val = 0 self.length = length self.rate = None self.start_time", "= FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False self._workers_active = 0 self._progress_bar =", "except timeout: self._progress_bar.stop() self._recving_file = False return printerror(\"Operation timed out\")", "len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn) self._sender_socket.send(b'\\x06')", "printerror(\"Error with socket\") except: self._progress_bar.stop() return printerror(\"Error receiving file\") else:", "True def add_progress(self, value): \"\"\" Count new progress. Parameter ---------", "port, workers) receiver.send_param_set() receiver.recv_file() if __name__ == '__main__': parser =", "as f: while self._recving_file and f.writable() and fs: packet =", "and fs: end_size = f.tell() + self._packet_size size = (self._packet_size", "self.transfer_type == 'M': res = self._recv_m() else: res = None", "timedelta(seconds=eta_s) if eta_s is not None else '?' clear_line =", "{errormsg}') exit(-1) exit(-1) exit(-1) exit(-1) else: warn(errormsg) def printalert(alert): \"\"\"Print", "self.socket.recv(1) == b'\\x06' # ACK except KeyboardInterrupt: return printerror(\"User aborted", "The main TCP port of the receiver. \"\"\" DEFAULT_PACKET_SIZE =", "False self._workers_active = 0 self._progress_bar = ProgressBar(None) try: self.socket =", "self.param_set = True class FlyterReciever: \"\"\" Handles Flyter file receiving", "True def recv_file(self): \"\"\"Receive a file.\"\"\" if not self.param_set: return", "port. 
filepath : str The path to the file to", "randint(10_000, 65536) with socket(AF_INET, SOCK_STREAM) as sock: try: sock.bind((host, port))", "self._progress_bar.stop() self._recving_file = False return printerror(\"Receiving file was unsuccessful\") else:", "\"\"\" Send file to receiver on the same network. Parameters", "self._sender_filename = None self._sender_filesizes = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file", "bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert byte sequence to 32 - but", "= True try: size = 0 for w in range(num_workers):", "of workers to be used during transmission. \"\"\" @staticmethod def", "random available TCP port.\"\"\" while True: port = randint(10_000, 65536)", "< (3, 6): warn('[!] Some features are not be compatible", "= None self._sender_token = None self._sender_filename = None self._sender_filesizes =", "\"Progress: \" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \"", "of the subdirectory where that host's sent files are stored.", "file_sizes): \"\"\" Send a file with multiple workers. Speeds up", "with the version of your ' 'python interpreter') FROMTERMINAL =", "answer.lower() == 'y': self._sender_socket.send(b'\\x06') # ACK else: self._sender_socket.send(b'\\x06') # NAK", "in self.workers: w.close() def _recv_s(self): \"\"\"Receive a file with a", "printerror(errormsg): \"\"\"Print an error message.\"\"\" global FROMTERMINAL if FROMTERMINAL: print(f'\\n[x]", "permission \" \"error\") except timeout: self._progress_bar.stop() self._sending_file = False return", "DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "the same network using raw sockets. Doesn't use encryption. 
\"\"\"", "recv_parser.add_argument('-i', '--ip', required=True, help=\"Host IP address\") recv_parser.add_argument('-p', '--port', type=int, required=True,", "to bytes for packing.\"\"\" res = ntohl(integer) res = hex(res)[2:]", "with receiver's parameters\") # Headers try: tok = self._sender_socket.recv(6) b64_tok", "persons to whom the Software is furnished to do so,", "the version of your ' 'python interpreter') FROMTERMINAL = False", "ACK except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except FileNotFoundError:", "Handles Flyter file receiving processes. Note: Receives from FlyterSender instances.", "sub-command.\" ) subparsers = parser.add_subparsers( dest=\"action\", help=\"The action to be", "of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "be sent. \"\"\" if not self.param_set: return printerror(\"Not yet set", "with sockets\") except: self._progress_bar.stop() self._recving_file = False return printerror(\"Error while", "import dirname, exists, join from random import randint from secrets", "packet: break self.socket.send(packet) assert self.socket.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet))", "import altsep, sep, \\ mkdir, stat, unlink from os.path import", "pass self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\") return res def recv_param_set(self): \"\"\"", "KeyboardInterrupt: return printerror(\"User aborted operation\") except AssertionError: return printerror(\"Receiver rejected\")", "self.socket = socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) self.workers = [", "# Start sending res = None try: if self._transfer_type ==", "== 'y': self._sender_socket.send(b'\\x06') # ACK else: self._sender_socket.send(b'\\x06') # NAK return", "def _send_s(self, filepath, file_size): \"\"\" Send a file with a", "during data transmissions. 
\"\"\" try: self.socket.connect((self.recver_ip, self.main_port)) except error: return", "else 0 r_bytes = round(data/pow(2, 10*p), precision) return f\"{r_bytes}{scale[p]}\" def", "printerror(\"User aborted operation\") self._sending_file = False return True def send_file(self,", "b''.join(wp) headers = b''.join([len_hn, hn, tok, tr_type, len_wp, wp]) except:", "join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with open(wpath, 'br') as temp: packet", "from random import randint from secrets import token_bytes from shutil", "shutil import get_terminal_size from socket import \\ socket, error, timeout,", "fsizes = [fsize//num_w for w in range(num_w)] fsizes[-1] += fsize", "socket, error, timeout, \\ ntohs, ntohl, htons, htonl, \\ gethostname,", "the temp file open(wpath, 'bw').close() # Delete the temp files", "return printerror(\"Not yet set with receiver's parameters\") # Headers try:", "recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True, help=\"Target receiver's IP address\")", "= FlyterReciever(host_ip_address, port, workers) receiver.send_param_set() receiver.recv_file() if __name__ == '__main__':", "self.start_value d_max_value = self.max_value - self.start_value d_time = time() -", "host_ip, main_port, num_workers): self.host_ip = host_ip self.main_port = main_port self.token", "recv_parser.add_argument('-p', '--port', type=int, required=True, help=\"TCP port to listen on\") recv_parser.add_argument('-w',", "self.socket.send(b'\\x15') # NAK return printerror(\"Error getting parameters from receiver\") else:", "self.current_val - self.start_value d_max_value = self.max_value - self.start_value d_time =", "except PermissionError: return printerror(\"Couldn't access file due to permission error\")", "len_wp, wp]) except: return printerror(\"Error building headers\") try: self._sender_socket.send(headers) assert", "using multiple workers. 
Parameters ---------- filepath : str The filepath", "file\") while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._sending_file =", "unpack Receiver's parameter settings. Used to set Sender's parameter settings", "= max_value self.current_val = 0 self.length = length self.rate =", "the file to be sent. \"\"\" sender = FlyterSender(ip_address, port)", "sender\") try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname =", "None self._sender_filename = None self._sender_filesizes = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE", "- bit integer to bytes for packing.\"\"\" res = ntohs(integer)", "\"\"\" if not self.param_set: return printerror(\"Not yet set with receiver's", "files for w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\"", "an error message.\"\"\" global FROMTERMINAL if FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1)", "= [bytes_to_int_s(self.socket.recv(2)) for w in range(len_wp)] self.socket.send(b'\\x06') # ACK except", "creates it first. Parameters ---------- hostname : str The name", "furnished to do so, subject to the following conditions: The", "res def send_param_set(self): \"\"\" Pack and send Receiver's parameter settings.", "(c) 2021 <NAME> Permission is hereby granted, free of charge,", "16 - bit integer to bytes for packing.\"\"\" res =", "receiver.recv_file() if __name__ == '__main__': parser = ArgumentParser( prog=\"Flyter\", epilog=\"See", "timed out\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading file has been", "THE SOFTWARE. 
\"\"\" from argparse import ArgumentParser from base64 import", "w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with", "socket): self.socket.close() def _send_s(self, filepath, file_size): \"\"\" Send a file", "= bytes.hex(byteseq) res = int(res, 16) return htons(res) def int_to_bytes_l(integer):", "\"Software\"), to deal in the Software without restriction, including without", "self.current_val >= self.max_value or self.stopped def start(self): \"\"\"Start the progress", "network. Parameters ---------- host_ip_address : str The receiver's host IP", "\" \"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}%", "FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs = self._sender_filesizes[0] with open(path, 'bw') as", "except timeout: self._progress_bar.stop() self._sending_file = False return printerror(\"Operation timed out\")", "for sender\") try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname", "self._sender_filesizes[0] with open(path, 'bw') as f: while self._recving_file and fs:", "FlyterReciever: \"\"\" Handles Flyter file receiving processes. Note: Receives from", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "for w in self.workers: w.close() def _recv_s(self): \"\"\"Receive a file", "eta_s is not None else '?' 
clear_line = \" \"*(get_terminal_size().columns", "Wait until sending file while not self._sending_file: pass # Display", "= False return printerror(\"Error with sockets\") except: self._progress_bar.stop() self._recving_file =", "'--ip', required=True, help=\"Host IP address\") recv_parser.add_argument('-p', '--port', type=int, required=True, help=\"TCP", "KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False return printerror(\"User aborted operation\") except", "# Headers try: tok = self.token num_w = max(1, len(self._worker_ports))", "join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc, args=(w, wpath), ).start() except", "single worker. Parameters ---------- filepath : str The filepath to", "not self._recving_file: pass # Display until file is received while", "except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access file due to permission", "# Progress bar thread self._progress_bar = ProgressBar(fsize, 40) self._progress_bar.start() def", "wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc, args=(w, wpath),", "w in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error initializing sockets')", "- self.start_value d_max_value = self.max_value - self.start_value d_time = time()", "fn self._sender_filesizes = fs # Start receiving try: if self.transfer_type", "'bw') as f: while self._recving_file and fs: packet = self._sender_socket.recv(self._packet_size)", "while True: port = randint(10_000, 65536) with socket(AF_INET, SOCK_STREAM) as", "Parameters ---------- max_value : int, float The upper limit of", "files (the \"Software\"), to deal in the Software without restriction,", "SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error initializing sockets') self.param_set = False def", 
"return printerror(\"Error while preparing headers\") try: b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending", "and fs: packet = self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet)", "the rights to use, copy, modify, merge, publish, distribute, sublicense,", "+= fsize - sum(fsizes) fn = pack_str(fname) len_fn = int_to_bytes_s(len(fn))", "unlink from os.path import dirname, exists, join from random import", "join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15') # NAK", "self._recving_file = False return printerror(\"Receiving file was unsuccessful\") else: self._sender_socket.send(b'\\x06')", "self._progress_bar.stop() return printerror(\"Operation timed out\") except: self._progress_bar.stop() return printerror(f\"Error while", "FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading file has been deleted\") except PermissionError:", "= parser.parse_args() if args.action == \"send\": send(args.ip, args.port, args.file) elif", "the same network. Parameters ---------- ip_address : str The target", "software and associated documentation files (the \"Software\"), to deal in", "start(self): \"\"\"Start the progress bar.\"\"\" self.stopped = False self.start_time =", "len(self.workers) self._recving_file = True try: for w in range(len(self.worker_ports)): wpath", ": int The target receiver's main TCP port. 
filepath :", "help=\"TCP port to listen on\") if len(argv) > 1: FROMTERMINAL", "is hereby granted, free of charge, to any person obtaining", "NAK return printalert(\"Rejected file transfer\") except error: return printerror(\"Sender isn't", "__name__ == '__main__': parser = ArgumentParser( prog=\"Flyter\", epilog=\"See '<command> --help'", "per = d_value/d_max_value prog = int(self.length*per) extra = self.length*round(per) >", "AssertionError: self._progress_bar.stop() return printerror(\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() return", "self.stopped = False self.start_time = time() self.start_value = self.current_val def", "printerror(\"Error building headers\") try: self._sender_socket.send(headers) assert self._sender_socket.recv(1) == b'\\x06' #", "with open(path, 'bw') as f: while self._recving_file and fs: packet", "print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s]", "= (self._packet_size - max(0, end_size - end)) packet = f.read(size)", "Display until file is received while not self._progress_bar.done: self._progress_bar.display() except:", "progress bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\")", "to the following conditions: The above copyright notice and this", "int_to_bytes_s(num_w) headers = b''.join([tok, len_fn, fn, len_fs, fs]) except: return", "not yet set with parameters\") def threadfunc(worker_num, fpath): self._workers_active +=", "b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2))", "b'\\x06' # ACK except AssertionError: return printerror(\"Receiver rejected handshake\") except", "= socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) 
self.workers = [ socket(AF_INET,", "access storage directory\") except error: self._progress_bar.stop() return printerror(\"Error with socket\")", "a file. Parameters ---------- filepath : str The filepath of", "'<command> --help' to read about a specific sub-command.\" ) subparsers", "timedelta from math import log from os import altsep, sep,", "\"\"\" __version__ = (0, 0, 0) __author__ = \"CryptoNyxz\" __license__", "self._progress_bar.stop() self._sending_file = False return printerror(\"Operation timed out\") except: self._progress_bar.stop()", "permit persons to whom the Software is furnished to do", "self._progress_bar.stop() self._sending_file = False return printerror(f\"Error while sending file\") finally:", "return printerror(\"Error while receiving file\") finally: self._workers_active -= 1 num_workers", "join(appfiles_dirname, 'Received Files') if not exists(storage_dirname): mkdir(storage_dirname) if hostname: host_storage_dirname", "exists, join from random import randint from secrets import token_bytes", "Return the path of the storage dir for received files.", "do so, subject to the following conditions: The above copyright", "an alert message.\"\"\" global FROMTERMINAL print(f'[!] 
{alert}') def int_to_bytes_s(integer): \"\"\"Convert", "return printerror(\"Error getting connected with socket\") except: self.socket.send(b'\\x15') # NAK", "any person obtaining a copy of this software and associated", "main_port : int The main TCP port of the receiver.", "self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return", "operation\") except timeout: self._progress_bar.stop() self._recving_file = False return printerror(\"Operation timed", "headers = b''.join([tok, len_fn, fn, len_fs, fs]) except: return printerror(\"Error", "address\") recv_parser.add_argument('-p', '--port', type=int, required=True, help=\"TCP port to listen on\")", "sock.bind((host, port)) except error: continue else: return port def printerror(errormsg):", "subdirectory where that host's sent files are stored. \"\"\" app_dirname", "class FlyterSender: \"\"\" Handles Flyter file sending processes. Note: Sends", "send_parser = subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True, help=\"Target", "\"\"\"Return a random available TCP port.\"\"\" while True: port =", "progress_thread(): try: # Wait until sending file while not self._sending_file:", "False return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() self._sending_file =", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT", "rejected handshake\") except timeout: return printerror('Operation timed out') except: return", "self.workers = [ socket(AF_INET, SOCK_STREAM) for w in range(num_workers) ]", "= file_size with open(filepath, 'br') as f: while self._sending_file and", "False return printerror(\"User aborted operation\") self._sending_file = False return True", "with socket\") except: self.socket.send(b'\\x15') # NAK return printerror(\"Error getting parameters", "error: self._progress_bar.stop() self._recving_file = False return 
printerror(\"Error with sockets\") except:", "return printerror(\"File doesn't exist\") # Headers try: tok = self.token", "self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error initializing sockets') self.param_set = False def", "FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with open(path, 'bw') as output: for w", "# Utility Classes class ProgressBar: \"\"\" For displaying progress bars.", "w in range(num_workers): Thread( target=threadfunc, args=( w, filepath, size, size", "f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) self._sender_socket.send(b'\\x06') # ACK except timeout:", "while self._sending_file and fs: packet = f.read(self._packet_size) if not packet:", "= b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs =", "unpack_str(sender_hn) self._sender_socket.send(b'\\x06') # ACK except timeout: return printerror(\"Operation timed out\")", ") with open(wpath, 'br') as temp: packet = True while", "= int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1) == b'\\x06' # ACK", ": str The IP address of the receiver. main_port :", "error: return printerror(\"Can't connect to \" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn =", "network using raw sockets. Doesn't use encryption. 
\"\"\" __version__ =", "return printerror(\"Couldn't access file due to permission error\") while self._workers_active:", "including without limitation the rights to use, copy, modify, merge,", "length self.rate = None self.start_time = None self.start_value = None", "if not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname, 'Received Files') if", "int_to_bytes_s(len(hn)) tok = self.token tr_type = pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports))", "with socket\") except: self._progress_bar.stop() return printerror(\"Error receiving file\") else: self._recving_file", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "def unpack_str(byteseq): \"\"\"Unpack a byte sequence into a string.\"\"\" return", "FileNotFoundError: return printerror(\"Couldn't access file\") except PermissionError: return printerror(\"Couldn't access", "SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) self.workers = [ socket(AF_INET, SOCK_STREAM) for", "workers to be used during transmission. \"\"\" @staticmethod def storage_dir(hostname=None):", "hn, tok, tr_type, len_wp, wp]) except: return printerror(\"Error building headers\")", "size = 0 for w in range(num_workers): Thread( target=threadfunc, args=(", "num_workers > 1 else [] self._sender_socket = None self._sender_hostname =", "sockets') self.param_set = False def __del__(self): if isinstance(self.__dict__.get('socket'), socket): self.socket.close()", "self.start_time = time() self.start_value = self.current_val def stop(self): \"\"\"Stop the", "except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except FileNotFoundError: self._progress_bar.stop()", "until file is received while not self._progress_bar.done: self._progress_bar.display() except: return", "send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? 
(y/n) \") if answer.lower()", "except error: return printerror(\"Error getting connected with socket\") except: self.socket.send(b'\\x15')", "[bytes_to_int_s(self.socket.recv(2)) for w in range(len_wp)] self.socket.send(b'\\x06') # ACK except error:", "thread\") Thread(target=progress_thread).start() self._sender_token = tok self._sender_filename = fn self._sender_filesizes =", "read about a specific sub-command.\" ) subparsers = parser.add_subparsers( dest=\"action\",", "help=\"The action to be performed\" ) send_parser = subparsers.add_parser(\"send\") recv_parser", "= subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True, help=\"Target receiver's IP address\") send_parser.add_argument('-p',", "without limitation the rights to use, copy, modify, merge, publish,", "% 2) + res return bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert byte", "self._recving_file = True try: for w in range(len(self.worker_ports)): wpath =", "available anymore\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while receiving", "and fs: packet = f.read(self._packet_size) if not packet: break self.socket.send(packet)", "size += file_sizes[w] except FileNotFoundError: return printerror(\"Couldn't access file\") except", "try: recver_socket = self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr = recver_socket.accept() send_tok", "--------- value : int, float Added progress value. \"\"\" if", "os import altsep, sep, \\ mkdir, stat, unlink from os.path", "len_fn, fn, len_fs, fs]) except: return printerror(\"Error while preparing headers\")", "= self.socket.accept() except timeout: return printerror(\"No sender available\") except: return", "Added progress value. 
\"\"\" if self.stopped: return self.current_val += value", "= False return printerror(\"Operation timed out\") except error: self._progress_bar.stop() self._recving_file", "self.socket.settimeout(60) self.workers = [ socket(AF_INET, SOCK_STREAM) for w in range(num_workers)", "Send a file with a single worker. Parameters ---------- filepath", "def start(self): \"\"\"Start the progress bar.\"\"\" self.stopped = False self.start_time", "file.\"\"\" if not self.param_set: return printerror(\"Not yet set with receiver's", "not self.param_set: return printerror(\"Not yet set with receiver's parameters\") #", "pass # Display until file is sent while not self._progress_bar.done:", "num_workers = len(self._worker_ports) self._sending_file = True try: size = 0", "string.encode() def unpack_str(byteseq): \"\"\"Unpack a byte sequence into a string.\"\"\"", "# Flyter Classes class FlyterSender: \"\"\" Handles Flyter file sending", ": int, float The upper limit of the progress bar.", "return True def _recv_m(self): \"\"\" Receive a file with multiple", "base64 import b64encode from datetime import timedelta from math import", "wp]) except: return printerror(\"Error building headers\") try: self._sender_socket.send(headers) assert self._sender_socket.recv(1)", "permission error\") except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except:", "directory\") except error: self._progress_bar.stop() return printerror(\"Error with socket\") except: self._progress_bar.stop()", "Parameters ---------- filepath : str The filepath to the file", "\"\"\"Convert 32 - but integer to bytes for packing.\"\"\" res", "deal in the Software without restriction, including without limitation the", "port to be used. num_workers : int The amount of", "int The main TCP port of the receiver. 
\"\"\" DEFAULT_PACKET_SIZE", "# ACK except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except", "progress bar.\"\"\" self.stopped = True def add_progress(self, value): \"\"\" Count", "temp files for w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname),", "workers to use. \"\"\" receiver = FlyterReciever(host_ip_address, port, workers) receiver.send_param_set()", "if __name__ == '__main__': parser = ArgumentParser( prog=\"Flyter\", epilog=\"See '<command>", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR", "SOCK_STREAM from threading import Thread from time import time from", "Receive a file with multiple workers. Speeds up transmission rate", "f: f.seek(start) while self._sending_file and fs: end_size = f.tell() +", "self.param_set: return printerror(\"Sender not yet set with parameters\") def threadfunc(worker_num,", "else 'M' self.worker_ports = [ random_port(self.host_ip) for w in range(num_workers)", "with socket(AF_INET, SOCK_STREAM) as sock: try: sock.bind((host, port)) except error:", "- start with open(fpath, 'br') as f: f.seek(start) while self._sending_file", "return string.encode() def unpack_str(byteseq): \"\"\"Unpack a byte sequence into a", "to be used. num_workers : int The amount of workers", "return printerror(\"Receiving file was unsuccessful\") else: self._sender_socket.send(b'\\x06') # ACK #", "is not None else '?' clear_line = \" \"*(get_terminal_size().columns -", "self.start_time = None self.start_value = None self.stopped = False @property", "storage dir for received files. 
If storage directory doesn't exist,", "= True try: for w in range(len(self.worker_ports)): wpath = join(", "def progress_thread(): try: # Wait until receiving file while not", "progress bar.\"\"\" self.stopped = False self.start_time = time() self.start_value =", "\"\"\" MIT License Copyright (c) 2021 <NAME> Permission is hereby", "+= 1 try: recver_socket = self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr =", "file\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access file due to", "timeout, \\ ntohs, ntohl, htons, htonl, \\ gethostname, \\ AF_INET,", "str The filepath to the file to be sent. \"\"\"", "---------- host_ip : str The Host IP address to be", "== 'S': res = self._recv_s() elif self.transfer_type == 'M': res", "args=(w, wpath), ).start() except FileNotFoundError: return printerror(\"Couldn't access file\") except", "self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() self._sender_token", "path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with open(path, 'bw') as", "False return printerror(\"Operation timed out\") except error: self._progress_bar.stop() self._recving_file =", "self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False self._workers_active = 0 self._progress_bar", "DEFAULT_PACKET_SIZE = 512 def __init__(self, host_ip, main_port, num_workers): self.host_ip =", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "None self.stopped = False @property def done(self): \"\"\"Return if already", "parameter settings used during data transmissions. 
\"\"\" try: self.socket.connect((self.recver_ip, self.main_port))", "if d_time else float('inf') eta_s = round((d_max_value - d_value)/rate) if", "transfer\") except error: return printerror(\"Sender isn't available anymore\") except: self._sender_socket.send(b'\\x15')", "using raw sockets. Doesn't use encryption. \"\"\" __version__ = (0,", "self._sender_socket.send(b'\\x06') # ACK else: self._sender_socket.send(b'\\x06') # NAK return printalert(\"Rejected file", "math import log from os import altsep, sep, \\ mkdir,", "error\") except: return printerror(\"Error while starting to send file\") while", "operation\") except AssertionError: return printerror(\"Receiver rejected\") except timeout: return printerror(\"Operation", "to accept file\") assert self.socket.recv(1) == b'\\x06' # ACK except", "res = None except: self._progress_bar.stop() self._recving_file = False return printerror(\"Receiving", "WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT", "num_w = max(1, len(self._worker_ports)) fpath = filepath.replace(altsep, sep) fname =", "self.start_value = self.current_val def stop(self): \"\"\"Stop the progress bar.\"\"\" self.stopped", "w, filepath, size, size + file_sizes[w] ), ).start() size +=", "return printerror(\"File doesn't exist\") self._sending_file = True try: fs =", "TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE", "self._worker_ports[worker_num]) ) sock.send(self.token) assert sock.recv(1) == b'\\x06' # ACK fs", "except: return printerror(\"Error during handshake\") try: len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname", "error message.\"\"\" global FROMTERMINAL if FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1) exit(-1)", "= None self._transfer_type = None self._worker_ports = None self._packet_size =", "parameters\") if not exists(filepath): return printerror(\"File doesn't exist\") self._sending_file =", "return printerror(\"Sender not yet set with parameters\") def threadfunc(worker_num, 
fpath):", "Copyright (c) 2021 <NAME> Permission is hereby granted, free of", "== self._sender_token: sender_socket.send(b'\\x06') # ACK else: sender_socket.send(b'\\x15') # NAK fs", "FileNotFoundError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access file\") except", "== '__main__': parser = ArgumentParser( prog=\"Flyter\", epilog=\"See '<command> --help' to", "__author__ = \"CryptoNyxz\" __license__ = \"\"\" MIT License Copyright (c)", "else: self._sending_file = False return True def _send_m(self, filepath, file_sizes):", "mkdir(storage_dirname) if hostname: host_storage_dirname = join(storage_dirname, hostname) if not exists(host_storage_dirname):", "if self.stopped: return d_value = self.current_val - self.start_value d_max_value =", "except PermissionError: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Couldn't save file due", "-= len(packet) sender_socket.send(b'\\x06') # ACK except KeyboardInterrupt: self._progress_bar.stop() self._recving_file =", "return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() self._sender_token = tok self._sender_filename", "building headers\") try: self._sender_socket.send(headers) assert self._sender_socket.recv(1) == b'\\x06' # ACK", "merge, publish, distribute, sublicense, and/or sell copies of the Software,", "def stop(self): \"\"\"Stop the progress bar.\"\"\" self.stopped = True def", "num_workers = len(self.workers) self._recving_file = True try: for w in", "integer to bytes for packing.\"\"\" res = ntohs(integer) res =", "so, subject to the following conditions: The above copyright notice", "charge, to any person obtaining a copy of this software", "printerror(\"Error while waiting for sender\") try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn", "self.stopped = True def add_progress(self, value): \"\"\" Count new progress.", "printerror(\"Error while saving file\") else: return True def recv_file(self): 
\"\"\"Receive", "thread\") Thread(target=progress_thread).start() # Start sending res = None try: if", "d_value/d_max_value prog = int(self.length*per) extra = self.length*round(per) > prog prog_bar", "fpath, start, end): self._workers_active += 1 try: with socket(AF_INET, SOCK_STREAM)", "w in range(num_w)] fsizes[-1] += fsize - sum(fsizes) fn =", "Thread( target=threadfunc, args=(w, wpath), ).start() except FileNotFoundError: return printerror(\"Couldn't access", "return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() # Start sending res", "None self._sender_hostname = None self._sender_token = None self._sender_filename = None", "file has been deleted\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access", "self._sender_filename ) fs = self._sender_filesizes[0] with open(path, 'bw') as f:", "printerror(\"Couldn't access file due to permission error\") except timeout: self._progress_bar.stop()", "sender_socket.send(b'\\x06') # ACK else: sender_socket.send(b'\\x15') # NAK fs = self._sender_filesizes[worker_num]", "timed out\") except Exception: return printerror(\"Error while sending headers to", "on\") recv_parser.add_argument('-w', '--workers', type=int, default=1, help=\"TCP port to listen on\")", "packet\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't access file\") except PermissionError:", "single worker.\"\"\" if not self.param_set: return printerror(\"Sender not yet set", "permission notice shall be included in all copies or substantial", "0) __author__ = \"CryptoNyxz\" __license__ = \"\"\" MIT License Copyright", "{alert}') def int_to_bytes_s(integer): \"\"\"Convert 16 - bit integer to bytes", "self._recving_file and f.writable() and fs: packet = sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet))", "self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn) 
self._sender_socket.send(b'\\x06') # ACK except timeout: return", "exit(-1) else: warn(errormsg) def printalert(alert): \"\"\"Print an alert message.\"\"\" global", "as temp: packet = True while packet: packet = temp.read(self._packet_size)", "None except: self._progress_bar.stop() self._recving_file = False return printerror(\"Receiving file was", "timeout: self._progress_bar.stop() self._recving_file = False return printerror(\"Operation timed out\") except", "unsuccessful\") else: # Wait for progress bar while not self._progress_bar.done:", "into a byte sequence.\"\"\" return string.encode() def unpack_str(byteseq): \"\"\"Unpack a", "hostname) if not exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname else: return storage_dirname", "\"\"\" Send a file. Parameters ---------- filepath : str The", "fs = self._sender_filesizes[0] with open(path, 'bw') as f: while self._recving_file", "while self._sending_file and fs: end_size = f.tell() + self._packet_size size", "getting parameters from receiver\") else: self.param_set = True class FlyterReciever:", "printerror(\"Sender isn't available anymore\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error", "@property def done(self): \"\"\"Return if already finished.\"\"\" return self.current_val >=", "while not self._recving_file: pass # Display until file is received", "False return printerror(\"User aborted operation\") except timeout: self._progress_bar.stop() self._recving_file =", "port : int The target receiver's main TCP port. 
filepath", "import b64encode from datetime import timedelta from math import log", "fn, len_fs, fs]) except: return printerror(\"Error while preparing headers\") try:", "# Build the file path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename )", "= hex(res)[2:] res = '0'*(len(res) % 2) + res return", "self._sender_socket.send(b'\\x06') # NAK return printalert(\"Rejected file transfer\") except error: return", "b''.join([tok, len_fn, fn, len_fs, fs]) except: return printerror(\"Error while preparing", "printerror(\"Not yet set with receiver's parameters\") if not exists(filepath): printerror(\"File", "# NAK return printalert(\"Rejected file transfer\") except error: return printerror(\"Sender", "for w in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error initializing", "\" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\", end=\"\\r\") # Flyter", "def __init__(self, max_value, length=50): self.max_value = max_value self.current_val = 0", "packet = f.read(self._packet_size) if not packet: break self.socket.send(packet) assert self.socket.recv(1)", "path to the file to be sent. \"\"\" sender =", "receiver's parameters\") if not exists(filepath): return printerror(\"File doesn't exist\") self._sending_file", "= main_port self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname = None self._recver_token =", "self._recving_file = False try: # Build the file path =", "The filepath to the file to be sent. \"\"\" if", "receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now receiving file", "Functions def send(ip_address, port, filepath): \"\"\" Send file to receiver", "sizes of the split-up file to be sent. 
\"\"\" if", "Headers try: tok = self.token num_w = max(1, len(self._worker_ports)) fpath", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "else: # Wait for progress bar while not self._progress_bar.done: pass", "getting connected with socket\") except: self.socket.send(b'\\x15') # NAK return printerror(\"Error", "\"\"\" Send a file with multiple workers. Speeds up transmission", "if version_info < (3, 6): warn('[!] Some features are not", "Used to set Sender's parameter settings used during data transmissions.", "w.close() def _recv_s(self): \"\"\"Receive a file with a single worker.\"\"\"", "action to be performed\" ) send_parser = subparsers.add_parser(\"send\") recv_parser =", "if not self.param_set: return printerror(\"Sender not yet set with parameters\")", "True def _send_m(self, filepath, file_sizes): \"\"\" Send a file with", "@staticmethod def storage_dir(hostname=None): \"\"\" Return the path of the storage", "<NAME> Permission is hereby granted, free of charge, to any", "get_terminal_size from socket import \\ socket, error, timeout, \\ ntohs,", "self._progress_bar.add_progress(len(packet)) fs -= len(packet) self._sender_socket.send(b'\\x06') # ACK except timeout: self._progress_bar.stop()", "def __del__(self): if isinstance(self.socket, socket): self.socket.close() def _send_s(self, filepath, file_size):", "connect to \" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn = pack_str(gethostname()) len_sender_hn =", "round(data/pow(2, 10*p), precision) return f\"{r_bytes}{scale[p]}\" def __init__(self, max_value, length=50): self.max_value", "file_sizes[w] except FileNotFoundError: return printerror(\"Couldn't access file\") except PermissionError: return", "Sender's parameter settings used during data transmissions. 
\"\"\" try: self.socket.connect((self.recver_ip,", "packet = self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) self._sender_socket.send(b'\\x06') #", "temp: packet = True while packet: packet = temp.read(self._packet_size) output.write(packet)", "spaces = ' '*(self.length - (prog + extra)) rate =", "obtaining a copy of this software and associated documentation files", "return printerror(\"Error while sending headers to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ]", "s in fsizes] fs = b''.join(fs) len_fs = int_to_bytes_s(num_w) headers", "2) + res return bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert byte sequence", "eta_s = round((d_max_value - d_value)/rate) if rate else \\ None", "\"\"\" Pack and send Receiver's parameter settings. Used to set", "ACK except timeout: return printerror(\"Operation timed out\") except: return printerror(\"Error", "= bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6) self._transfer_type =", "import Thread from time import time from warnings import warn", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "a copy of this software and associated documentation files (the", "sending processes. Note: Sends to FlyterReceiver instances. 
Parameterss ---------- recver_ip", "num_workers : int The amount of workers to be used", "sending headers to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now", "yet set with receiver's parameters\") if not exists(filepath): return printerror(\"File", "== b'\\x06' # ACK fs = end - start with", "not exists(filepath): return printerror(\"File doesn't exist\") # Headers try: tok", "timed out\") except: self._progress_bar.stop() return printerror(f\"Error while sending file\") else:", "not self._sending_file: pass # Display until file is sent while", "\"\"\" receiver = FlyterReciever(host_ip_address, port, workers) receiver.send_param_set() receiver.recv_file() if __name__", "recver_ip, main_port): self.recver_ip = recver_ip self.main_port = main_port self.token =", "integer to bytes for packing.\"\"\" res = ntohl(integer) res =", "float('inf') eta_s = round((d_max_value - d_value)/rate) if rate else \\", "a single worker.\"\"\" if not self.param_set: return printerror(\"Sender not yet", "= int(log(data, 2)/10) if data else 0 r_bytes = round(data/pow(2,", "= [ socket(AF_INET, SOCK_STREAM) for w in range(num_workers) ] if", "of the storage dir for received files. If storage directory", "= int_to_bytes_s(len(hn)) tok = self.token tr_type = pack_str(self.transfer_type) len_wp =", "'--workers', type=int, default=1, help=\"TCP port to listen on\") if len(argv)", "as sock: try: sock.bind((host, port)) except error: continue else: return", "fs = [int_to_bytes_l(s) for s in fsizes] fs = b''.join(fs)", "self._sending_file = False self._workers_active = 0 self._progress_bar = None try:", "of the progress bar. 
\"\"\" @staticmethod def byte_rescale(data, precision=1): scale", "None self.start_value = None self.stopped = False @property def done(self):", "= bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs =", "except: return printerror(\"Error building headers\") try: self._sender_socket.send(headers) assert self._sender_socket.recv(1) ==", "= stat(fpath).st_size fsizes = [fsize//num_w for w in range(num_w)] fsizes[-1]", "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF", "__init__(self, recver_ip, main_port): self.recver_ip = recver_ip self.main_port = main_port self.token", "d_time else float('inf') eta_s = round((d_max_value - d_value)/rate) if rate", "= join(appfiles_dirname, 'Received Files') if not exists(storage_dirname): mkdir(storage_dirname) if hostname:", "return res def send_param_set(self): \"\"\" Pack and send Receiver's parameter", "is sent while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with", "unsuccessful\") else: self._sender_socket.send(b'\\x06') # ACK # Wait for progress bar", "message.\"\"\" global FROMTERMINAL print(f'[!] {alert}') def int_to_bytes_s(integer): \"\"\"Convert 16 -", "it first. Parameters ---------- hostname : str The name of", "self._sender_filesizes[worker_num] with open(fpath, 'bw') as f: while self._recving_file and f.writable()", "print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now sending file ({ProgressBar.byte_rescale(fsize)})\") #", "printerror(\"Error getting parameters from receiver\") else: self.param_set = True class", "printerror(\"Operation timed out\") except error: self._progress_bar.stop() self._recving_file = False return", "data transmissions. 
\"\"\" try: printalert(\"Waiting for sender\") self.socket.listen(1) self._sender_socket, addrport", "byte sequence to 32 - but integer for unpacking.\"\"\" res", ":obj:`int`, optional The number of workers to use. \"\"\" receiver", "to permission error\") except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\")", "bar thread self._progress_bar = ProgressBar(fsize, 40) self._progress_bar.start() def progress_thread(): try:", "self._recving_file = False return printerror(\"Error with sockets\") except: self._progress_bar.stop() self._recving_file", "host IP address. port : int The receiver's host port", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,", "False return True def _send_m(self, filepath, file_sizes): \"\"\" Send a", "= 512 def __init__(self, host_ip, main_port, num_workers): self.host_ip = host_ip", "\"\"\" if not self.param_set: return printerror(\"Sender not yet set with", "if not self.param_set: return printerror(\"Not yet set with receiver's parameters\")", "res = self._recv_m() else: res = None except: self._progress_bar.stop() self._recving_file", "def send(ip_address, port, filepath): \"\"\" Send file to receiver on", "16) return htons(res) def int_to_bytes_l(integer): \"\"\"Convert 32 - but integer", "printerror(\"File doesn't exist\") # Headers try: tok = self.token num_w", "port)) except error: continue else: return port def printerror(errormsg): \"\"\"Print", "import timedelta from math import log from os import altsep,", "printerror(\"Error getting connected with socket\") except: self.socket.send(b'\\x15') # NAK return", "range(num_workers) ] if num_workers > 1 else [] if self.workers:", "file. Parameters ---------- filepath : str The filepath of the", "is furnished to do so, subject to the following conditions:", "value): \"\"\" Count new progress. 
Parameter --------- value : int,", "printerror(f\"Error while sending file\") else: self._sending_file = False return True", "__del__(self): if isinstance(self.socket, socket): self.socket.close() def _send_s(self, filepath, file_size): \"\"\"", "The filepath of the file to be sent. \"\"\" if", "byte sequence.\"\"\" return string.encode() def unpack_str(byteseq): \"\"\"Unpack a byte sequence", "return printerror(\"Couldn't access storage directory\") except error: self._progress_bar.stop() return printerror(\"Error", "printerror(\"Couldn't access file due to permission error\") while self._workers_active: try:", "self.start_value d_time = time() - self.start_time per = d_value/d_max_value prog", "FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() return", "self.current_val += value def display(self): \"\"\"Display the current progress.\"\"\" if", "length=50): self.max_value = max_value self.current_val = 0 self.length = length", "filepath of the file to be sent. \"\"\" if not", "self._sending_file: pass # Display until file is sent while not", "try: sock.bind((host, port)) except error: continue else: return port def", "= None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False self._workers_active =", "for receiver to accept file\") assert self.socket.recv(1) == b'\\x06' #", "if self._transfer_type == 'S': res = self._send_s(fpath, fsize) elif self._transfer_type", "= True path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs =", "workers. Speeds up transmission rate by using multiple workers. 
\"\"\"", "fs = self._sender_filesizes[worker_num] with open(fpath, 'bw') as f: while self._recving_file", "send(ip_address, port, filepath): \"\"\" Send file to receiver on the", "printerror(\"User aborted operation\") except AssertionError: return printerror(\"Receiver rejected\") except timeout:", "continue else: return port def printerror(errormsg): \"\"\"Print an error message.\"\"\"", "try: pass except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False printerror(\"User aborted", "return printerror(\"Not yet set with receiver's parameters\") if not exists(filepath):", "SOCK_STREAM) as sock: try: sock.bind((host, port)) except error: continue else:", "if eta_s is not None else '?' clear_line = \"", "while saving file\") else: return True def recv_file(self): \"\"\"Receive a", "range(len(self.worker_ports)): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc, args=(w,", "License Copyright (c) 2021 <NAME> Permission is hereby granted, free", "sent: {fname}\") return res def recv_param_set(self): \"\"\" Receive and unpack", "\\ socket, error, timeout, \\ ntohs, ntohl, htons, htonl, \\", "self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) self.workers = [ socket(AF_INET, SOCK_STREAM) for w", "\"\"\" Flyter Tool for transferring files on the same network", "Files') if not exists(storage_dirname): mkdir(storage_dirname) if hostname: host_storage_dirname = join(storage_dirname,", "exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname, 'Received Files') if not exists(storage_dirname):", "filepath.replace(altsep, sep) fname = fpath.split(sep)[-1] fsize = stat(fpath).st_size fsizes =", "= None self.stopped = False @property def done(self): \"\"\"Return if", "== 1 else 'M' self.worker_ports = [ random_port(self.host_ip) for w", "'br') as temp: packet = True while packet: packet =", "timeout: return printerror(\"No 
sender available\") except: return printerror(\"Error while waiting", "= bytes_to_int_s(self._sender_socket.recv(2)) fs = [bytes_to_int_l(self._sender_socket.recv(4)) for s in range(len_fs)] fs_all", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT", "PermissionError: return printerror(\"Couldn't access file due to permission error\") while", "printerror(\"Not yet set with receiver's parameters\") if not exists(filepath): return", "return printerror(f\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() self._sending_file = False", "already finished.\"\"\" return self.current_val >= self.max_value or self.stopped def start(self):", "= False return printerror(\"User aborted operation\") except AssertionError: self._progress_bar.stop() self._sending_file", "self._recving_file = False printerror(\"User aborted operation\") self._recving_file = False try:", "to FlyterReceiver instances. Parameterss ---------- recver_ip : str The IP", "\"\"\" Receive a file with multiple workers. Speeds up transmission", "= join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with open(wpath, 'br') as temp:", "def _recv_s(self): \"\"\"Receive a file with a single worker.\"\"\" if", "to listen on\") recv_parser.add_argument('-w', '--workers', type=int, default=1, help=\"TCP port to", "receive(host_ip_address, port, workers=1): \"\"\" Receive a file from sender on", "join(storage_dirname, hostname) if not exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname else: return", "\"CryptoNyxz\" __license__ = \"\"\" MIT License Copyright (c) 2021 <NAME>", "OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "The filepath to the file to be sent. 
file_sizes :", "file ({ProgressBar.byte_rescale(fs_all)})\") # Progress bar thread self._progress_bar = ProgressBar(fs_all, 35)", "\"\"\"Convert 16 - bit integer to bytes for packing.\"\"\" res", "randint from secrets import token_bytes from shutil import get_terminal_size from", "self._transfer_type = None self._worker_ports = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file", "Delete the temp files for w in range(num_workers): wpath =", "except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access storage directory\") except error:", "until file is sent while not self._progress_bar.done: self._progress_bar.display() except: return", "MIT License Copyright (c) 2021 <NAME> Permission is hereby granted,", "= False return printerror(\"User aborted operation\") except timeout: self._progress_bar.stop() self._recving_file", "port in self.worker_ports] wp = b''.join(wp) headers = b''.join([len_hn, hn,", "import randint from secrets import token_bytes from shutil import get_terminal_size", "hostname: host_storage_dirname = join(storage_dirname, hostname) if not exists(host_storage_dirname): mkdir(host_storage_dirname) return", "byte sequence to 16 - but integer for unpacking.\"\"\" res", "args = parser.parse_args() if args.action == \"send\": send(args.ip, args.port, args.file)", "'br') as f: f.seek(start) while self._sending_file and fs: end_size =", "== \"send\": send(args.ip, args.port, args.file) elif args.action == \"recv\": receive(args.ip,", "\"\"\" @staticmethod def byte_rescale(data, precision=1): scale = ['B', 'KB', 'MB',", "stat, unlink from os.path import dirname, exists, join from random", "the temp files for w in range(num_workers): wpath = join(", "len_wp = int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port) for port in self.worker_ports]", "bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs = 
[bytes_to_int_l(self._sender_socket.recv(4))", "self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S' if num_workers == 1", "FROMTERMINAL = True args = parser.parse_args() if args.action == \"send\":", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR", "None try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error initializing", "self._sender_filesizes = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False self._workers_active", "= True def add_progress(self, value): \"\"\" Count new progress. Parameter", "Thread(target=progress_thread).start() self._sender_token = tok self._sender_filename = fn self._sender_filesizes = fs", "file is sent while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error", "self.main_port)) self.socket.settimeout(60) self.workers = [ socket(AF_INET, SOCK_STREAM) for w in", "except FileNotFoundError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access file\")", "except error: return printerror(\"Error with sockets\") except: self._sender_socket.send(b'\\x15') # NAK", "self._recving_file = False self._workers_active = 0 self._progress_bar = ProgressBar(None) try:", "= self._sender_filesizes[worker_num] with open(fpath, 'bw') as f: while self._recving_file and", "aborted operation\") except AssertionError: return printerror(\"Receiver rejected\") except timeout: return", "PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS", "b64encode from datetime import timedelta from math import log from", "\"\"\" DEFAULT_PACKET_SIZE = 1024 def __init__(self, recver_ip, main_port): self.recver_ip =", "# ACK except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Sending file", "recver_socket.accept() send_tok = sender_socket.recv(6) if send_tok == self._sender_token: sender_socket.send(b'\\x06') #", "= f.read(size) if not packet: break sock.send(packet) assert sock.recv(1) ==", "The target receiver's IP address. port : int The target", "not exists(storage_dirname): mkdir(storage_dirname) if hostname: host_storage_dirname = join(storage_dirname, hostname) if", "access file due to permission error\") except: return printerror(\"Error while", "SOCK_STREAM) as sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num]) ) sock.send(self.token) assert sock.recv(1)", "with receiver's parameters\") if not exists(filepath): return printerror(\"File doesn't exist\")", "ACK except: return printerror(\"Error while sending headers to sender\") else:", "return bytes.fromhex(res) def bytes_to_int_s(byteseq): \"\"\"Convert byte sequence to 16 -", "not self.param_set: return printerror(\"Not yet set with receiver's parameters\") if", "self._sending_file = False return printerror(\"User aborted operation\") except AssertionError: self._progress_bar.stop()", "return printerror(\"Error building headers\") try: self._sender_socket.send(headers) assert self._sender_socket.recv(1) == b'\\x06'", "self._progress_bar.stop() self._sending_file = False return printerror(f\"Receiver rejected packet\") except FileNotFoundError:", "handshake\") except timeout: return printerror('Operation timed out') except: return printerror(\"Error", "ACK except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False return printerror(\"User aborted", "TCP port to be used. 
num_workers : int The amount", "if data else 0 r_bytes = round(data/pow(2, 10*p), precision) return", "__init__(self, host_ip, main_port, num_workers): self.host_ip = host_ip self.main_port = main_port", "end): self._workers_active += 1 try: with socket(AF_INET, SOCK_STREAM) as sock:", "file is received while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error", "timed out') except: return printerror(\"Error during handshake\") try: len_hn =", "self._recver_hostname = None self._recver_token = None self._transfer_type = None self._worker_ports", "while not self._sending_file: pass # Display until file is sent", "to permission error\") while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop()", "time() - self.start_time per = d_value/d_max_value prog = int(self.length*per) extra", ": int, float Added progress value. \"\"\" if self.stopped: return", "to permission \" \"error\") except timeout: self._progress_bar.stop() self._sending_file = False", "args=( w, filepath, size, size + file_sizes[w] ), ).start() size", "TCP port. 
filepath : str The path to the file", "ntohs, ntohl, htons, htonl, \\ gethostname, \\ AF_INET, SOCK_STREAM from", "+ '▌'*extra spaces = ' '*(self.length - (prog + extra))", "'█'*prog + '▌'*extra spaces = ' '*(self.length - (prog +", "file due to permission error\") except: return printerror(\"Error while starting", "self._sender_socket.send(headers) assert self._sender_socket.recv(1) == b'\\x06' # ACK except: return printerror(\"Error", "_recv_s(self): \"\"\"Receive a file with a single worker.\"\"\" if not", "sender available\") except: return printerror(\"Error while waiting for sender\") try:", "assert self._sender_socket.recv(1) == b'\\x06' # ACK except: return printerror(\"Error while", "size + file_sizes[w] ), ).start() size += file_sizes[w] except FileNotFoundError:", "f: while self._sending_file and fs: packet = f.read(self._packet_size) if not", "The sizes of the split-up file to be sent. \"\"\"", "len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for w in range(len_wp)]", "return printerror(\"Sender isn't available anymore\") except: self._sender_socket.send(b'\\x15') # NAK return", "file while not self._recving_file: pass # Display until file is", "a byte sequence.\"\"\" return string.encode() def unpack_str(byteseq): \"\"\"Unpack a byte", "return printerror(\"Error with sockets\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error", "self._send_m(fpath, fsizes) assert self.socket.recv(1) == b'\\x06' # ACK except: self._progress_bar.stop()", "FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15') # NAK return", "the progress bar. 
length : :obj:`int`, optional The length of", "else: warn(errormsg) def printalert(alert): \"\"\"Print an alert message.\"\"\" global FROMTERMINAL", "Utility Functions def random_port(host): \"\"\"Return a random available TCP port.\"\"\"", "__del__(self): if isinstance(self.__dict__.get('socket'), socket): self.socket.close() if self.__dict__.get('workers'): for w in", "assert sock.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet)", "return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access", "\\ None eta = timedelta(seconds=eta_s) if eta_s is not None", "appfiles_dirname = join(app_dirname, 'Flyter') if not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname =", "first. Parameters ---------- hostname : str The name of the", "ProgressBar: \"\"\" For displaying progress bars. Parameters ---------- max_value :", "self.token num_w = max(1, len(self._worker_ports)) fpath = filepath.replace(altsep, sep) fname", "port number\") send_parser.add_argument('-f', '--file', required=True, help=\"Path to the file to", "timeout: return printerror(\"Operation timed out\") except Exception: return printerror(\"Error while", "fs = end - start with open(fpath, 'br') as f:", "error: continue else: return port def printerror(errormsg): \"\"\"Print an error", "assert sock.recv(1) == b'\\x06' # ACK fs = end -", "send_parser.add_argument('-p', '--port', type=int, required=True, help=\"Target receiver's TCP port number\") send_parser.add_argument('-f',", "= pack_str(fname) len_fn = int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s) for s", "False # Utility Functions def random_port(host): \"\"\"Return a random available", "printerror(\"Operation timed out\") except: return printerror(\"Error during handshake\") try: hn", "ACK except error: return printerror(\"Error getting connected with socket\") except:", "in range(len_wp)] self.socket.send(b'\\x06') # ACK except 
error: return printerror(\"Error getting", "\"\"\"Receive a file with a single worker.\"\"\" if not self.param_set:", "float The upper limit of the progress bar. length :", "len(packet) except AssertionError: self._progress_bar.stop() return printerror(\"Receiver rejected packet\") except FileNotFoundError:", "= False def __del__(self): if isinstance(self.__dict__.get('socket'), socket): self.socket.close() if self.__dict__.get('workers'):", "unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Couldn't save file", "a byte sequence into a string.\"\"\" return byteseq.decode() # Utility", "return printerror(f\"Error while sending file\") else: self._sending_file = False return", "rejected packet\") except FileNotFoundError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't", "int_to_bytes_l(integer): \"\"\"Convert 32 - but integer to bytes for packing.\"\"\"", "b'\\x06' # ACK fs = end - start with open(fpath,", "port) sender.recv_param_set() return sender.send_file(filepath) def receive(host_ip_address, port, workers=1): \"\"\" Receive", "fs = b''.join(fs) len_fs = int_to_bytes_s(num_w) headers = b''.join([tok, len_fn,", "(self.recver_ip, self._worker_ports[worker_num]) ) sock.send(self.token) assert sock.recv(1) == b'\\x06' # ACK", "try: len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6)", "self._sending_file = False return printerror(f\"Error while sending file\") finally: self._workers_active", "sent files are stored. 
\"\"\" app_dirname = dirname(__file__) appfiles_dirname =", "assert self.socket.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet)", "= self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports =", "exists(filepath): return printerror(\"File doesn't exist\") self._sending_file = True try: fs", "self.socket.send(packet) assert self.socket.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -=", "use encryption. \"\"\" __version__ = (0, 0, 0) __author__ =", "= length self.rate = None self.start_time = None self.start_value =", "split-up file to be sent. \"\"\" if not self.param_set: return", "the same network. Parameters ---------- host_ip_address : str The receiver's", "portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\",", "return printerror(\"Error getting parameters from receiver\") else: self.param_set = True", "round((d_max_value - d_value)/rate) if rate else \\ None eta =", "# NAK return printerror(\"Error while receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ]", "not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start()", "= None self.start_time = None self.start_value = None self.stopped =", "file ({ProgressBar.byte_rescale(fsize)})\") # Progress bar thread self._progress_bar = ProgressBar(fsize, 40)", "done(self): \"\"\"Return if already finished.\"\"\" return self.current_val >= self.max_value or", "= sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) sender_socket.send(b'\\x06') # ACK", "packing.\"\"\" res = ntohs(integer) res = hex(res)[2:] res = '0'*(len(res)", "packet: break sock.send(packet) assert sock.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet))", "bytes.fromhex(res) def 
bytes_to_int_s(byteseq): \"\"\"Convert byte sequence to 16 - but", "False return printerror(f\"Sending file was unsuccessful\") else: # Wait for", "bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1))", "not exists(filepath): return printerror(\"File doesn't exist\") self._sending_file = True try:", "modify, merge, publish, distribute, sublicense, and/or sell copies of the", "res = int(res, 16) return htons(res) def int_to_bytes_l(integer): \"\"\"Convert 32", "else [] if self.workers: for w in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w]))", "OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH", "with open(wpath, 'br') as temp: packet = True while packet:", "# Utility Functions def random_port(host): \"\"\"Return a random available TCP", "except AssertionError: self._progress_bar.stop() self._sending_file = False return printerror(f\"Receiver rejected packet\")", "= int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s) for s in fsizes] fs", "= sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to send: {fn}", "raw sockets. Doesn't use encryption. 
\"\"\" __version__ = (0, 0,", "= ProgressBar(fsize, 40) self._progress_bar.start() def progress_thread(): try: # Wait until", "None self._sender_token = None self._sender_filename = None self._sender_filesizes = None", "Tool for transferring files on the same network using raw", "0, 0) __author__ = \"CryptoNyxz\" __license__ = \"\"\" MIT License", "file while not self._sending_file: pass # Display until file is", "sender_socket.recv(6) if send_tok == self._sender_token: sender_socket.send(b'\\x06') # ACK else: sender_socket.send(b'\\x15')", "1: FROMTERMINAL = True args = parser.parse_args() if args.action ==", "sell copies of the Software, and to permit persons to", "self._progress_bar = ProgressBar(None) try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port))", "int_to_bytes_s(integer): \"\"\"Convert 16 - bit integer to bytes for packing.\"\"\"", "'S' if num_workers == 1 else 'M' self.worker_ports = [", "return printerror(\"Error with socket\") except: self._progress_bar.stop() return printerror(\"Error receiving file\")", "# ACK except error: return printerror(\"Error getting connected with socket\")", "len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs = [bytes_to_int_l(self._sender_socket.recv(4)) for s in range(len_fs)]", "True def send_file(self, filepath): \"\"\" Send a file. Parameters ----------", "of the receiver. \"\"\" DEFAULT_PACKET_SIZE = 1024 def __init__(self, recver_ip,", "socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error initializing sockets') self.param_set = False", "---------- recver_ip : str The IP address of the receiver.", "transferring files on the same network using raw sockets. Doesn't", "return storage_dirname DEFAULT_PACKET_SIZE = 512 def __init__(self, host_ip, main_port, num_workers):", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "file to be sent. 
file_sizes : list(int) The sizes of", "a random available TCP port.\"\"\" while True: port = randint(10_000,", "{fname} ]\") self.socket.send(headers) print(\"Waiting for receiver to accept file\") assert", "socket(AF_INET, SOCK_STREAM) for w in range(num_workers) ] if num_workers >", "sys import argv, exit, version_info if version_info < (3, 6):", "len(packet) self._sender_socket.send(b'\\x06') # ACK except timeout: self._progress_bar.stop() return printerror(\"Operation timed", "self._progress_bar.stop() return printerror(\"Error receiving file\") else: self._recving_file = False return", "s in range(len_fs)] fs_all = sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\"", "pack_str(string): \"\"\"Pack a string into a byte sequence.\"\"\" return string.encode()", "files. If storage directory doesn't exist, creates it first. Parameters", "IP address. port : int The receiver's host port to", "size, size + file_sizes[w] ), ).start() size += file_sizes[w] except", "self._recving_file: pass # Display until file is received while not", "TCP port.\"\"\" while True: port = randint(10_000, 65536) with socket(AF_INET,", "'M': res = self._recv_m() else: res = None except: self._progress_bar.stop()", "= int(res, 16) return htonl(res) def pack_str(string): \"\"\"Pack a string", "the file to be sent\") recv_parser.add_argument('-i', '--ip', required=True, help=\"Host IP", "sep) fname = fpath.split(sep)[-1] fsize = stat(fpath).st_size fsizes = [fsize//num_w", "try: # Wait until sending file while not self._sending_file: pass", "name of the subdirectory where that host's sent files are", "or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED", "EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted operation\") except", "has been deleted\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access storage", "print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now receiving file ({ProgressBar.byte_rescale(fs_all)})\") #", "to receiver on the same network. Parameters ---------- ip_address :", "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "self._sending_file = True try: fs = file_size with open(filepath, 'br')", "publish, distribute, sublicense, and/or sell copies of the Software, and", "False @property def done(self): \"\"\"Return if already finished.\"\"\" return self.current_val", "SOCK_STREAM) for w in range(num_workers) ] if num_workers > 1", "workers. Speeds up transmission rate by using multiple workers. Parameters", "\"\"\"Pack a string into a byte sequence.\"\"\" return string.encode() def", "receiving file\") else: self._recving_file = False return True def _recv_m(self):", "fname = fpath.split(sep)[-1] fsize = stat(fpath).st_size fsizes = [fsize//num_w for", "filepath, file_sizes): \"\"\" Send a file with multiple workers. Speeds", "self._progress_bar.stop() self._recving_file = False return printerror(\"Error with sockets\") except: self._progress_bar.stop()", "filepath, size, size + file_sizes[w] ), ).start() size += file_sizes[w]", "be used during transmission. 
\"\"\" @staticmethod def storage_dir(hostname=None): \"\"\" Return", "{self._sender_filename}\") return res def send_param_set(self): \"\"\" Pack and send Receiver's", "f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\", end=\"\\r\") #", "host_ip : str The Host IP address to be used.", "self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for w in range(len_wp)] self.socket.send(b'\\x06') # ACK", "try: hn = pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn)) tok = self.token", "return printerror(\"Error receiving file\") else: self._recving_file = False return True", "in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath) except", "self._sender_filename ) with open(path, 'bw') as output: for w in", "None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False self._workers_active = 0", "was unsuccessful\") else: self._sender_socket.send(b'\\x06') # ACK # Wait for progress", "def display(self): \"\"\"Display the current progress.\"\"\" if self.stopped: return d_value", "\"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \"", "= self.token tr_type = pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports)) wp =", "be included in all copies or substantial portions of the", "str The filepath to the file to be sent. 
file_sizes", "parameters\") if not exists(filepath): printerror(\"File doesn't exist\") def threadfunc(worker_num, fpath,", "temp.read(self._packet_size) output.write(packet) # Clear the contents of the temp file", "Parameters ---------- host_ip : str The Host IP address to", "\"\"\" sender = FlyterSender(ip_address, port) sender.recv_param_set() return sender.send_file(filepath) def receive(host_ip_address,", "from socket import \\ socket, error, timeout, \\ ntohs, ntohl,", "main_port self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname = None self._recver_token = None", "port def printerror(errormsg): \"\"\"Print an error message.\"\"\" global FROMTERMINAL if", "PermissionError: return printerror(\"Couldn't access file due to permission error\") except:", "= '0'*(len(res) % 2) + res return bytes.fromhex(res) def bytes_to_int_l(byteseq):", "size = (self._packet_size - max(0, end_size - end)) packet =", "False return printerror(\"Error while receiving file\") finally: self._workers_active -= 1", "while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False", "filepath : str The filepath of the file to be", "sockets. Doesn't use encryption. \"\"\" __version__ = (0, 0, 0)", "fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs = [bytes_to_int_l(self._sender_socket.recv(4)) for", "during handshake\") try: hn = pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn)) tok", "self.recver_ip = recver_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname", "Doesn't use encryption. \"\"\" __version__ = (0, 0, 0) __author__", "OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" from argparse import", "the file path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with open(path,", "= None self._worker_ports = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file =", "host_storage_dirname else: return storage_dirname DEFAULT_PACKET_SIZE = 512 def __init__(self, host_ip,", "to be sent\") recv_parser.add_argument('-i', '--ip', required=True, help=\"Host IP address\") recv_parser.add_argument('-p',", "set with parameters\") def threadfunc(worker_num, fpath): self._workers_active += 1 try:", "if self.transfer_type == 'S': res = self._recv_s() elif self.transfer_type ==", "# ACK except: return printerror(\"Error while sending headers to sender\")", "2) + res return bytes.fromhex(res) def bytes_to_int_s(byteseq): \"\"\"Convert byte sequence", "receiver = FlyterReciever(host_ip_address, port, workers) receiver.send_param_set() receiver.recv_file() if __name__ ==", "else: self.param_set = True class FlyterReciever: \"\"\" Handles Flyter file", "= False return printerror(\"Couldn't access file due to permission \"", "For displaying progress bars. 
Parameters ---------- max_value : int, float", "due to permission \" \"error\") except timeout: self._progress_bar.stop() self._sending_file =", "<PASSWORD>_<PASSWORD>(6) self._recver_hostname = None self._recver_token = None self._transfer_type = None", "self.workers[w].settimeout(60) except: printerror('Error initializing sockets') self.param_set = False def __del__(self):", "except: return printerror(\"Error while preparing headers\") try: b64_tok = b64encode(self._recver_token).decode()", "self._progress_bar.stop() return printerror(\"Operation timed out\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading", "= False return printerror(f\"Sending file was unsuccessful\") else: # Wait", ") subparsers = parser.add_subparsers( dest=\"action\", help=\"The action to be performed\"", "unpack_str(byteseq): \"\"\"Unpack a byte sequence into a string.\"\"\" return byteseq.decode()", "sending res = None try: if self._transfer_type == 'S': res", "file\") else: return True def recv_file(self): \"\"\"Receive a file.\"\"\" if", ": str The filepath to the file to be sent.", "\"send\": send(args.ip, args.port, args.file) elif args.action == \"recv\": receive(args.ip, args.port,", "host_ip_address : str The receiver's host IP address. 
port :", "= None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False self._workers_active =", "main_port): self.recver_ip = recver_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6)", "else float('inf') eta_s = round((d_max_value - d_value)/rate) if rate else", "conditions: The above copyright notice and this permission notice shall", "(self._packet_size - max(0, end_size - end)) packet = f.read(size) if", "the Software without restriction, including without limitation the rights to", "THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND", "except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Sending file was unsuccessful\")", "str The path to the file to be sent. \"\"\"", "---------- ip_address : str The target receiver's IP address. port", "= bytes.hex(byteseq) res = int(res, 16) return htonl(res) def pack_str(string):", "self._progress_bar.stop() return printerror(\"Couldn't access file due to permission error\") except", "altsep, sep, \\ mkdir, stat, unlink from os.path import dirname,", "with socket(AF_INET, SOCK_STREAM) as sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num]) ) sock.send(self.token)", "rejected packet\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't access file\") except", "(3, 6): warn('[!] Some features are not be compatible with", "hex(res)[2:] res = '0'*(len(res) % 2) + res return bytes.fromhex(res)", "sender_socket.send(b'\\x06') # ACK except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False return", "using multiple workers. \"\"\" if not self.param_set: return printerror(\"Sender not", "storage_dirname = join(appfiles_dirname, 'Received Files') if not exists(storage_dirname): mkdir(storage_dirname) if", "address. 
port : int The target receiver's main TCP port.", "= (0, 0, 0) __author__ = \"CryptoNyxz\" __license__ = \"\"\"", "printerror(\"Not yet set with receiver's parameters\") # Headers try: tok", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "os.path import dirname, exists, join from random import randint from", "= 0 self._progress_bar = None try: self.socket = socket(AF_INET, SOCK_STREAM)", "+ extra)) rate = d_value/d_time if d_time else float('inf') eta_s", "THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "up transmission rate by using multiple workers. Parameters ---------- filepath", "list(int) The sizes of the split-up file to be sent.", "w in range(len_wp)] self.socket.send(b'\\x06') # ACK except error: return printerror(\"Error", "threadfunc(worker_num, fpath): self._workers_active += 1 try: recver_socket = self.workers[worker_num] recver_socket.listen(1)", "\"\"\" Return the path of the storage dir for received", "port, filepath): \"\"\" Send file to receiver on the same", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "Note: Sends to FlyterReceiver instances. Parameterss ---------- recver_ip : str", "network. Parameters ---------- ip_address : str The target receiver's IP", "+ res return bytes.fromhex(res) def bytes_to_int_s(byteseq): \"\"\"Convert byte sequence to", "to send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? (y/n) \") if", "filepath : str The path to the file to be", "> 1: FROMTERMINAL = True args = parser.parse_args() if args.action", "self.param_set = True # Simplified Functions def send(ip_address, port, filepath):", "except Exception: return printerror(\"Error while sending headers to receiver\") print(f\"[", "version_info if version_info < (3, 6): warn('[!] 
Some features are", "workers=1): \"\"\" Receive a file from sender on the same", "send_parser.add_argument('-f', '--file', required=True, help=\"Path to the file to be sent\")", "-= len(packet) self._sender_socket.send(b'\\x06') # ACK except timeout: self._progress_bar.stop() return printerror(\"Operation", "if not exists(filepath): return printerror(\"File doesn't exist\") # Headers try:", "sender_hn = pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1)", "\"\"\"Receive a file.\"\"\" if not self.param_set: return printerror(\"Not yet set", "mkdir, stat, unlink from os.path import dirname, exists, join from", "except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except: self._progress_bar.stop() return", "f\"{self.recver_ip}:{self.main_port}\") try: sender_hn = pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn]))", "return printerror(\"User aborted operation\") except timeout: self._progress_bar.stop() self._recving_file = False", "error: self._progress_bar.stop() return printerror(\"Error with socket\") except: self._progress_bar.stop() return printerror(\"Error", "import argv, exit, version_info if version_info < (3, 6): warn('[!]", "ACK else: self._sender_socket.send(b'\\x06') # NAK return printalert(\"Rejected file transfer\") except", "be sent. \"\"\" sender = FlyterSender(ip_address, port) sender.recv_param_set() return sender.send_file(filepath)", ": str The name of the subdirectory where that host's", "self._sender_socket.recv(6) b64_tok = b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn))", "file\") except PermissionError: return printerror(\"Couldn't access file due to permission", "to permit persons to whom the Software is furnished to", "send Receiver's parameter settings. 
Used to set Sender's parameter settings", "import \\ socket, error, timeout, \\ ntohs, ntohl, htons, htonl,", "timeout: return printerror('Operation timed out') except: return printerror(\"Error during handshake\")", "file_size with open(filepath, 'br') as f: while self._sending_file and fs:", "printerror(\"Couldn't save file due to permissions\") except error: return printerror(\"Error", "self._sender_token = tok self._sender_filename = fn self._sender_filesizes = fs #", "Clear the contents of the temp file open(wpath, 'bw').close() #", "output: for w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\"", "res def recv_param_set(self): \"\"\" Receive and unpack Receiver's parameter settings.", "self.transfer_type = 'S' if num_workers == 1 else 'M' self.worker_ports", "len(self._worker_ports) self._sending_file = True try: size = 0 for w", "to listen on\") if len(argv) > 1: FROMTERMINAL = True", "value. \"\"\" if self.stopped: return self.current_val += value def display(self):", "be sent\") recv_parser.add_argument('-i', '--ip', required=True, help=\"Host IP address\") recv_parser.add_argument('-p', '--port',", "receiver on the same network. Parameters ---------- ip_address : str", "fs -= len(packet) self._sender_socket.send(b'\\x06') # ACK except timeout: self._progress_bar.stop() return", "2021 <NAME> Permission is hereby granted, free of charge, to", "open(fpath, 'bw') as f: while self._recving_file and f.writable() and fs:", "self._recver_token = self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports", "Speeds up transmission rate by using multiple workers. Parameters ----------", "receiver's IP address. 
port : int The target receiver's main", "'--file', required=True, help=\"Path to the file to be sent\") recv_parser.add_argument('-i',", "printerror(\"Operation timed out\") except Exception: return printerror(\"Error while sending headers", "error, timeout, \\ ntohs, ntohl, htons, htonl, \\ gethostname, \\", "ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO", "workers : :obj:`int`, optional The number of workers to use.", "AssertionError: return printerror(\"Receiver rejected handshake\") except timeout: return printerror('Operation timed", "in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with open(wpath,", "multiple workers. Parameters ---------- filepath : str The filepath to", "= False self.start_time = time() self.start_value = self.current_val def stop(self):", "def recv_file(self): \"\"\"Receive a file.\"\"\" if not self.param_set: return printerror(\"Not", "not yet set with parameters\") try: self._recving_file = True path", "int, float The upper limit of the progress bar. length", "return printerror(\"Operation timed out\") except: self._progress_bar.stop() return printerror(f\"Error while sending", "\"\"\" For displaying progress bars. Parameters ---------- max_value : int,", "recver_socket.listen(1) sender_socket, hostaddr = recver_socket.accept() send_tok = sender_socket.recv(6) if send_tok", "for unpacking.\"\"\" res = bytes.hex(byteseq) res = int(res, 16) return", "f: while self._recving_file and fs: packet = self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet))", "exit, version_info if version_info < (3, 6): warn('[!] 
Some features", "== b'\\x06' # ACK except AssertionError: return printerror(\"Receiver rejected handshake\")", "self._progress_bar.stop() return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't", "True try: for w in range(len(self.worker_ports)): wpath = join( FlyterReciever.storage_dir(self._sender_hostname),", "open(wpath, 'bw').close() # Delete the temp files for w in", "def int_to_bytes_l(integer): \"\"\"Convert 32 - but integer to bytes for", "address of the receiver. main_port : int The main TCP", "ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except AssertionError: self._progress_bar.stop() return printerror(\"Receiver", "byte_rescale(data, precision=1): scale = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']", "self._sender_token = None self._sender_filename = None self._sender_filesizes = None self._packet_size", "import get_terminal_size from socket import \\ socket, error, timeout, \\", "fpath): self._workers_active += 1 try: recver_socket = self.workers[worker_num] recver_socket.listen(1) sender_socket,", "AssertionError: return printerror(\"Receiver rejected\") except timeout: return printerror(\"Operation timed out\")", "storage_dirname DEFAULT_PACKET_SIZE = 512 def __init__(self, host_ip, main_port, num_workers): self.host_ip", "else: return True def recv_file(self): \"\"\"Receive a file.\"\"\" if not", "printerror(\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't access file\")", "> prog prog_bar = '█'*prog + '▌'*extra spaces = '", "target receiver's main TCP port. 
filepath : str The path", "'*(self.length - (prog + extra)) rate = d_value/d_time if d_time", "to whom the Software is furnished to do so, subject", "try: self._recving_file = True path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename )", "b''.join([len_hn, hn, tok, tr_type, len_wp, wp]) except: return printerror(\"Error building", "else \\ None eta = timedelta(seconds=eta_s) if eta_s is not", "settings used during data transmissions. \"\"\" try: self.socket.connect((self.recver_ip, self.main_port)) except", "35) self._progress_bar.start() def progress_thread(): try: # Wait until receiving file", "transmission rate by using multiple workers. \"\"\" if not self.param_set:", "filepath to the file to be sent. \"\"\" if not", "packet\") except FileNotFoundError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access", "main TCP port of the receiver. \"\"\" DEFAULT_PACKET_SIZE = 1024", "printerror(\"Sender not yet set with parameters\") try: self._recving_file = True", "= FlyterSender(ip_address, port) sender.recv_param_set() return sender.send_file(filepath) def receive(host_ip_address, port, workers=1):", "data transmissions. \"\"\" try: self.socket.connect((self.recver_ip, self.main_port)) except error: return printerror(\"Can't", "FlyterSender instances. Parameters ---------- host_ip : str The Host IP", "= f.tell() + self._packet_size size = (self._packet_size - max(0, end_size", "required=True, help=\"Target receiver's TCP port number\") send_parser.add_argument('-f', '--file', required=True, help=\"Path", "for progress bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully received:", "same network using raw sockets. Doesn't use encryption. 
\"\"\" __version__", "self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False self._workers_active = 0 self._progress_bar", "self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1)) len_wp", "except FileNotFoundError: return printerror(\"Couldn't access file\") except PermissionError: return printerror(\"Couldn't", "address. port : int The receiver's host port to listen", "b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except KeyboardInterrupt: self._progress_bar.stop()", "return True def recv_file(self): \"\"\"Receive a file.\"\"\" if not self.param_set:", "filepath): \"\"\" Send a file. Parameters ---------- filepath : str", ": :obj:`int`, optional The number of workers to use. \"\"\"", "of the Software, and to permit persons to whom the", "file was unsuccessful\") else: # Wait for progress bar while", "res = ntohl(integer) res = hex(res)[2:] res = '0'*(len(res) %", "True class FlyterReciever: \"\"\" Handles Flyter file receiving processes. Note:", "-= 1 num_workers = len(self._worker_ports) self._sending_file = True try: size", "fsize) elif self._transfer_type == 'M': res = self._send_m(fpath, fsizes) assert", "PermissionError: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Couldn't save file due to", "printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access file", "return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() self._sending_file = False", "bars. 
Parameters ---------- max_value : int, float The upper limit", "(the \"Software\"), to deal in the Software without restriction, including", "range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error initializing sockets') self.param_set =", ") send_parser = subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True,", "file due to permissions\") except error: return printerror(\"Error with sockets\")", "def done(self): \"\"\"Return if already finished.\"\"\" return self.current_val >= self.max_value", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "['B', 'KB', 'MB', 'GB', 'TB', 'PB'] p = int(log(data, 2)/10)", "= join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with open(path, 'bw') as output:", "file receiving processes. Note: Receives from FlyterSender instances. Parameters ----------", "file\") finally: self._workers_active -= 1 num_workers = len(self._worker_ports) self._sending_file =", "w in range(num_workers) ] if num_workers > 1 else []", "ntohs(integer) res = hex(res)[2:] res = '0'*(len(res) % 2) +", "= ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] p = int(log(data,", "except: return printerror(\"Error while sending headers to sender\") else: self.param_set", "except FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop()", "f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\", end=\"\\r\") # Flyter Classes", "while receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now receiving", "= 0 self.length = length self.rate = None self.start_time =", "f\" [ {fname} ]\") self.socket.send(headers) print(\"Waiting for receiver to accept", "# ACK else: sender_socket.send(b'\\x15') # NAK fs = 
self._sender_filesizes[worker_num] with", "1 num_workers = len(self._worker_ports) self._sending_file = True try: size =", "= d_value/d_max_value prog = int(self.length*per) extra = self.length*round(per) > prog", "in the Software without restriction, including without limitation the rights", "self._sending_file = False return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop()", "type=int, default=1, help=\"TCP port to listen on\") if len(argv) >", "max(0, end_size - end)) packet = f.read(size) if not packet:", "<PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S' if num_workers == 1 else 'M'", "self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted operation\") except AssertionError:", "a single worker. Parameters ---------- filepath : str The filepath", "sent\") recv_parser.add_argument('-i', '--ip', required=True, help=\"Host IP address\") recv_parser.add_argument('-p', '--port', type=int,", "return d_value = self.current_val - self.start_value d_max_value = self.max_value -", "else: self._sender_socket.send(b'\\x06') # NAK return printalert(\"Rejected file transfer\") except error:", "file due to permission \" \"error\") except timeout: self._progress_bar.stop() self._sending_file", "with progress thread\") Thread(target=progress_thread).start() self._sender_token = tok self._sender_filename = fn", "while sending file\") else: self._sending_file = False return True def", "= \" \"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}| \"", "f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) sender_socket.send(b'\\x06') # ACK except KeyboardInterrupt:", "def threadfunc(worker_num, fpath): self._workers_active += 1 try: recver_socket = self.workers[worker_num]", "self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access file due to", "packing.\"\"\" res = ntohl(integer) res = hex(res)[2:] res = 
'0'*(len(res)", "unpacking.\"\"\" res = bytes.hex(byteseq) res = int(res, 16) return htons(res)", "self._progress_bar.add_progress(len(packet)) fs -= len(packet) except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False", "True # Simplified Functions def send(ip_address, port, filepath): \"\"\" Send", "used during data transmissions. \"\"\" try: printalert(\"Waiting for sender\") self.socket.listen(1)", "handshake\") try: len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token =", "hn = pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn)) tok = self.token tr_type", "== b'\\x06' # ACK except: return printerror(\"Error while sending headers", "self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False printerror(\"User", "of the temp file open(wpath, 'bw').close() # Delete the temp", "progress.\"\"\" if self.stopped: return d_value = self.current_val - self.start_value d_max_value", "except error: self._progress_bar.stop() return printerror(\"Error with socket\") except: self._progress_bar.stop() return", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,", "receiver.send_param_set() receiver.recv_file() if __name__ == '__main__': parser = ArgumentParser( prog=\"Flyter\",", "join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs = self._sender_filesizes[0] with open(path, 'bw')", "main_port : int The main TCP port to be used.", "the file to be sent. \"\"\" if not self.param_set: return", "try: fs = file_size with open(filepath, 'br') as f: while", "while starting to send file\") while self._workers_active: try: pass except", "receiving file while not self._recving_file: pass # Display until file", "directory doesn't exist, creates it first. 
Parameters ---------- hostname :", "return sender.send_file(filepath) def receive(host_ip_address, port, workers=1): \"\"\" Receive a file", "res return bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert byte sequence to 32", "OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\"", "for w in range(num_workers) ] if num_workers > 1 else", "self._sender_hostname = unpack_str(sender_hn) self._sender_socket.send(b'\\x06') # ACK except timeout: return printerror(\"Operation", "until sending file while not self._sending_file: pass # Display until", "self.worker_ports] wp = b''.join(wp) headers = b''.join([len_hn, hn, tok, tr_type,", "self._recving_file and fs: packet = self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -=", "f.writable() and fs: packet = sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -=", "def int_to_bytes_s(integer): \"\"\"Convert 16 - bit integer to bytes for", "printerror(\"Couldn't access file due to permission error\") except: return printerror(\"Error", "printerror(\"Error with progress thread\") Thread(target=progress_thread).start() # Start sending res =", "progress_thread(): try: # Wait until receiving file while not self._recving_file:", "= int(res, 16) return htons(res) def int_to_bytes_l(integer): \"\"\"Convert 32 -", "aborted operation\") self._sending_file = False return True def send_file(self, filepath):", "return printerror(\"Error while waiting for sender\") try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2))", "self._sender_filename = fn self._sender_filesizes = fs # Start receiving try:", "def _recv_m(self): \"\"\" Receive a file with multiple workers. 
Speeds", "sender = FlyterSender(ip_address, port) sender.recv_param_set() return sender.send_file(filepath) def receive(host_ip_address, port,", "= fpath.split(sep)[-1] fsize = stat(fpath).st_size fsizes = [fsize//num_w for w", "return printerror(\"Error while starting to send file\") while self._workers_active: try:", "file to receiver on the same network. Parameters ---------- ip_address", "= parser.add_subparsers( dest=\"action\", help=\"The action to be performed\" ) send_parser", "datetime import timedelta from math import log from os import", "= True try: fs = file_size with open(filepath, 'br') as", "class ProgressBar: \"\"\" For displaying progress bars. Parameters ---------- max_value", "for w in range(num_w)] fsizes[-1] += fsize - sum(fsizes) fn", "for s in range(len_fs)] fs_all = sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\"", "self._progress_bar.stop() self._recving_file = False printerror(\"User aborted operation\") self._recving_file = False", "except AssertionError: return printerror(\"Receiver rejected handshake\") except timeout: return printerror('Operation", "htons(res) def int_to_bytes_l(integer): \"\"\"Convert 32 - but integer to bytes", "connected with socket\") except: self.socket.send(b'\\x15') # NAK return printerror(\"Error getting", "== b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except AssertionError:", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN", "printerror('Error initializing sockets') self.param_set = False def __del__(self): if isinstance(self.__dict__.get('socket'),", "self._progress_bar.stop() self._recving_file = False return printerror(\"Operation timed out\") except error:", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "[ random_port(self.host_ip) for w in range(num_workers) ] if num_workers >", "pass # Display until file is received while not self._progress_bar.done:", "fs]) except: return printerror(\"Error while preparing headers\") try: b64_tok =", 
"DEFAULT_PACKET_SIZE = 1024 def __init__(self, recver_ip, main_port): self.recver_ip = recver_ip", "parameters\") def threadfunc(worker_num, fpath): self._workers_active += 1 try: recver_socket =", "\" f\"is now sending file ({ProgressBar.byte_rescale(fsize)})\") # Progress bar thread", "help=\"Host IP address\") recv_parser.add_argument('-p', '--port', type=int, required=True, help=\"TCP port to", "= len(self.workers) self._recving_file = True try: for w in range(len(self.worker_ports)):", "tok = self.token tr_type = pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports)) wp", "into a string.\"\"\" return byteseq.decode() # Utility Classes class ProgressBar:", "end=\"\\r\") # Flyter Classes class FlyterSender: \"\"\" Handles Flyter file", "(prog + extra)) rate = d_value/d_time if d_time else float('inf')", "app_dirname = dirname(__file__) appfiles_dirname = join(app_dirname, 'Flyter') if not exists(appfiles_dirname):", "if self.stopped: return self.current_val += value def display(self): \"\"\"Display the", "compatible with the version of your ' 'python interpreter') FROMTERMINAL", "= None self._sender_hostname = None self._sender_token = None self._sender_filename =", "The receiver's host IP address. port : int The receiver's", "progress bars. Parameters ---------- max_value : int, float The upper", "pass self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\") return res def send_param_set(self): \"\"\"", "required=True, help=\"TCP port to listen on\") recv_parser.add_argument('-w', '--workers', type=int, default=1,", "instances. 
Parameters ---------- host_ip : str The Host IP address", "to \" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn = pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn))", "= self._sender_filesizes[0] with open(path, 'bw') as f: while self._recving_file and", "self.socket = socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error initializing sockets') self.param_set", "range(num_workers) ] if num_workers > 1 else [] self._sender_socket =", "self._progress_bar.display() except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() # Start", "timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except FileNotFoundError: self._progress_bar.stop() return", "self._progress_bar.stop() return printerror(\"Error with socket\") except: self._progress_bar.stop() return printerror(\"Error receiving", "'y': self._sender_socket.send(b'\\x06') # ACK else: self._sender_socket.send(b'\\x06') # NAK return printalert(\"Rejected", "\" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn = pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn,", "self.socket.connect((self.recver_ip, self.main_port)) except error: return printerror(\"Can't connect to \" f\"{self.recver_ip}:{self.main_port}\")", "file\") finally: self._workers_active -= 1 num_workers = len(self.workers) self._recving_file =", "False try: # Build the file path = join( FlyterReciever.storage_dir(self._sender_hostname),", "temp file open(wpath, 'bw').close() # Delete the temp files for", "f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? 
(y/n) \") if answer.lower() == 'y': self._sender_socket.send(b'\\x06')", ") sock.send(self.token) assert sock.recv(1) == b'\\x06' # ACK fs =", "above copyright notice and this permission notice shall be included", "self.stopped: return self.current_val += value def display(self): \"\"\"Display the current", "args.action == \"send\": send(args.ip, args.port, args.file) elif args.action == \"recv\":", "out\") except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Error while sending", "if not exists(filepath): printerror(\"File doesn't exist\") def threadfunc(worker_num, fpath, start,", ") unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Couldn't save", "length : :obj:`int`, optional The length of the progress bar.", "self.max_value or self.stopped def start(self): \"\"\"Start the progress bar.\"\"\" self.stopped", "f.seek(start) while self._sending_file and fs: end_size = f.tell() + self._packet_size", "(y/n) \") if answer.lower() == 'y': self._sender_socket.send(b'\\x06') # ACK else:", "host_storage_dirname = join(storage_dirname, hostname) if not exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname", "add_progress(self, value): \"\"\" Count new progress. Parameter --------- value :", "- d_value)/rate) if rate else \\ None eta = timedelta(seconds=eta_s)", "return f\"{r_bytes}{scale[p]}\" def __init__(self, max_value, length=50): self.max_value = max_value self.current_val", "access file\") except PermissionError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't", "open(path, 'bw') as f: while self._recving_file and fs: packet =", "from math import log from os import altsep, sep, \\", "set with receiver's parameters\") if not exists(filepath): printerror(\"File doesn't exist\")", "FlyterReceiver instances. 
Parameterss ---------- recver_ip : str The IP address", "Start sending res = None try: if self._transfer_type == 'S':", "while sending headers to sender\") else: self.param_set = True #", "def printalert(alert): \"\"\"Print an alert message.\"\"\" global FROMTERMINAL print(f'[!] {alert}')", "self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S' if", "finally: self._workers_active -= 1 num_workers = len(self.workers) self._recving_file = True", "save file due to permissions\") except error: return printerror(\"Error with", "with progress thread\") Thread(target=progress_thread).start() # Start sending res = None", "self._sender_socket.send(b'\\x06') # ACK except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\")", "= ProgressBar(None) try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60)", "= round(data/pow(2, 10*p), precision) return f\"{r_bytes}{scale[p]}\" def __init__(self, max_value, length=50):", "packet: packet = temp.read(self._packet_size) output.write(packet) # Clear the contents of", "workers) receiver.send_param_set() receiver.recv_file() if __name__ == '__main__': parser = ArgumentParser(", "to any person obtaining a copy of this software and", "def bytes_to_int_l(byteseq): \"\"\"Convert byte sequence to 32 - but integer", "send_tok = sender_socket.recv(6) if send_tok == self._sender_token: sender_socket.send(b'\\x06') # ACK", "self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname = None self._recver_token = None self._transfer_type", "len(packet) sender_socket.send(b'\\x06') # ACK except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False", "int The main TCP port to be used. 
num_workers :", "access file\") except PermissionError: return printerror(\"Couldn't access file due to", "= bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for w in range(len_wp)] self.socket.send(b'\\x06')", "person obtaining a copy of this software and associated documentation", "recver_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname = None", "{self._recver_hostname}-{b64_tok}:\" f\" [ {fname} ]\") self.socket.send(headers) print(\"Waiting for receiver to", "of the split-up file to be sent. \"\"\" if not", "file open(wpath, 'bw').close() # Delete the temp files for w", "int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1) == b'\\x06' # ACK except", "6): warn('[!] Some features are not be compatible with the", "return printerror(\"Couldn't access file due to permission \" \"error\") except", "a specific sub-command.\" ) subparsers = parser.add_subparsers( dest=\"action\", help=\"The action", "and this permission notice shall be included in all copies", "= pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn)) tok = self.token tr_type =", "return printerror(\"Operation timed out\") except Exception: return printerror(\"Error while sending", "while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with progress thread\")", "None try: if self._transfer_type == 'S': res = self._send_s(fpath, fsize)", "= join(app_dirname, 'Flyter') if not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname,", "---------- filepath : str The filepath to the file to", "import ArgumentParser from base64 import b64encode from datetime import timedelta", "fs -= len(packet) sender_socket.send(b'\\x06') # ACK except KeyboardInterrupt: self._progress_bar.stop() self._recving_file", "\" \"Accept? 
(y/n) \") if answer.lower() == 'y': self._sender_socket.send(b'\\x06') #", "self.workers: w.close() def _recv_s(self): \"\"\"Receive a file with a single", "0 self._progress_bar = ProgressBar(None) try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip,", "sender_socket, hostaddr = recver_socket.accept() send_tok = sender_socket.recv(6) if send_tok ==", "required=True, help=\"Target receiver's IP address\") send_parser.add_argument('-p', '--port', type=int, required=True, help=\"Target", "ProgressBar(None) try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) self.workers", "try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) self.workers =", "type=int, required=True, help=\"Target receiver's TCP port number\") send_parser.add_argument('-f', '--file', required=True,", "try: b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\" [ {fname}", "return printerror(\"Operation timed out\") except: self._progress_bar.stop() self._sending_file = False return", "progress bar. \"\"\" @staticmethod def byte_rescale(data, precision=1): scale = ['B',", "file due to permission error\") except timeout: self._progress_bar.stop() return printerror(\"Operation", "socket\") except: self._progress_bar.stop() return printerror(\"Error receiving file\") else: self._recving_file =", "def pack_str(string): \"\"\"Pack a string into a byte sequence.\"\"\" return", "def _send_m(self, filepath, file_sizes): \"\"\" Send a file with multiple", "# Wait for progress bar while not self._progress_bar.done: pass self._progress_bar.display()", "of the receiver. 
main_port : int The main TCP port", "f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\",", "the storage dir for received files. If storage directory doesn't", "== 'S': res = self._send_s(fpath, fsize) elif self._transfer_type == 'M':", "bar thread self._progress_bar = ProgressBar(fs_all, 35) self._progress_bar.start() def progress_thread(): try:", "int The target receiver's main TCP port. filepath : str", "Receive and unpack Receiver's parameter settings. Used to set Sender's", "ProgressBar(fs_all, 35) self._progress_bar.start() def progress_thread(): try: # Wait until receiving", "= main_port self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S' if num_workers", "else: return port def printerror(errormsg): \"\"\"Print an error message.\"\"\" global", "self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while saving file\") else: return", "None else '?' clear_line = \" \"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\"", "return printerror(\"Couldn't access file due to permission error\") except: return", "\" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\", end=\"\\r\")", "---------- host_ip_address : str The receiver's host IP address. port", "tok = self.token num_w = max(1, len(self._worker_ports)) fpath = filepath.replace(altsep,", "str The target receiver's IP address. port : int The", "int(self.length*per) extra = self.length*round(per) > prog prog_bar = '█'*prog +", "except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() # Start sending", "argv, exit, version_info if version_info < (3, 6): warn('[!] 
Some", "print(f\"\\nSuccessfully received: {self._sender_filename}\") return res def send_param_set(self): \"\"\" Pack and", "bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for w in range(len_wp)] self.socket.send(b'\\x06') #", "documentation files (the \"Software\"), to deal in the Software without", "'bw') as output: for w in range(num_workers): wpath = join(", "args.port, args.file) elif args.action == \"recv\": receive(args.ip, args.port, args.workers) else:", "The amount of workers to be used during transmission. \"\"\"", "without restriction, including without limitation the rights to use, copy,", "receiving file ({ProgressBar.byte_rescale(fs_all)})\") # Progress bar thread self._progress_bar = ProgressBar(fs_all,", "used. main_port : int The main TCP port to be", "fs = file_size with open(filepath, 'br') as f: while self._sending_file", "else: self._sender_socket.send(b'\\x06') # ACK # Wait for progress bar while", ": str The target receiver's IP address. 
port : int", "= time() - self.start_time per = d_value/d_max_value prog = int(self.length*per)", "file\") else: self._sending_file = False return True def _send_m(self, filepath,", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "printerror(\"Couldn't access file\") except PermissionError: return printerror(\"Couldn't access file due", "512 def __init__(self, host_ip, main_port, num_workers): self.host_ip = host_ip self.main_port", "doesn't exist\") def threadfunc(worker_num, fpath, start, end): self._workers_active += 1", "PermissionError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access file due", "printerror(\"Error while preparing headers\") try: b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending to", "self.length*round(per) > prog prog_bar = '█'*prog + '▌'*extra spaces =", "printerror(\"Error while sending headers to sender\") else: self.param_set = True", "error: return printerror(\"Sender isn't available anymore\") except: self._sender_socket.send(b'\\x15') # NAK", "except AssertionError: return printerror(\"Receiver rejected\") except timeout: return printerror(\"Operation timed", "bytes_to_int_l(byteseq): \"\"\"Convert byte sequence to 32 - but integer for", "aborted operation\") except AssertionError: self._progress_bar.stop() self._sending_file = False return printerror(f\"Receiver", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "= ArgumentParser( prog=\"Flyter\", epilog=\"See '<command> --help' to read about a", "self.workers: for w in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error", "been deleted\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access storage directory\")", "granted, free of charge, to any person obtaining a copy", "a file with a single worker. 
Parameters ---------- filepath :", "__license__ = \"\"\" MIT License Copyright (c) 2021 <NAME> Permission", "filepath): \"\"\" Send file to receiver on the same network.", "self._sender_socket.send(b'\\x06') # ACK # Wait for progress bar while not", "import log from os import altsep, sep, \\ mkdir, stat,", "the progress bar.\"\"\" self.stopped = False self.start_time = time() self.start_value", "self._progress_bar.stop() self._recving_file = False return printerror(\"Error while receiving file\") finally:", "= int_to_bytes_s(num_w) headers = b''.join([tok, len_fn, fn, len_fs, fs]) except:", "but integer to bytes for packing.\"\"\" res = ntohl(integer) res", "received files. If storage directory doesn't exist, creates it first.", "host_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S'", "tr_type = pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port) for", "\"\"\" Count new progress. Parameter --------- value : int, float", "break self.socket.send(packet) assert self.socket.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs", "sep, \\ mkdir, stat, unlink from os.path import dirname, exists,", "your ' 'python interpreter') FROMTERMINAL = False # Utility Functions", "= filepath.replace(altsep, sep) fname = fpath.split(sep)[-1] fsize = stat(fpath).st_size fsizes", "join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with open(path, 'bw') as output: for", "finally: self._workers_active -= 1 num_workers = len(self._worker_ports) self._sending_file = True", "self.param_set: return printerror(\"Sender not yet set with parameters\") try: self._recving_file", "NAK return printerror(\"Error while saving file\") else: return True def", "receiver's parameters\") if not exists(filepath): return printerror(\"File doesn't exist\") #", "[bytes_to_int_l(self._sender_socket.recv(4)) for s in range(len_fs)] fs_all = sum(fs) answer =", 
"printerror(\"User aborted operation\") except AssertionError: self._progress_bar.stop() self._sending_file = False return", "True def _recv_m(self): \"\"\" Receive a file with multiple workers.", "ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION", "not packet: break self.socket.send(packet) assert self.socket.recv(1) == b'\\x06' # ACK", "THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" from", "self._sending_file = False return printerror(f\"Sending file was unsuccessful\") else: #", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "parameters\") if not exists(filepath): return printerror(\"File doesn't exist\") # Headers", "file path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with open(path, 'bw')", "self._recver_token = None self._transfer_type = None self._worker_ports = None self._packet_size", "def add_progress(self, value): \"\"\" Count new progress. Parameter --------- value", "port, workers=1): \"\"\" Receive a file from sender on the", "workers. Parameters ---------- filepath : str The filepath to the", "FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc, args=(w, wpath), ).start() except FileNotFoundError:", "res = self._recv_s() elif self.transfer_type == 'M': res = self._recv_m()", "16) return htonl(res) def pack_str(string): \"\"\"Pack a string into a", "int, float Added progress value. 
\"\"\" if self.stopped: return self.current_val", "ACK else: sender_socket.send(b'\\x15') # NAK fs = self._sender_filesizes[worker_num] with open(fpath,", "recv_parser.add_argument('-w', '--workers', type=int, default=1, help=\"TCP port to listen on\") if", "as f: while self._recving_file and fs: packet = self._sender_socket.recv(self._packet_size) f.write(packet)", "= self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) self._sender_socket.send(b'\\x06') # ACK", "else: self._recving_file = False return True def _recv_m(self): \"\"\" Receive", "notice shall be included in all copies or substantial portions", "fs # Start receiving try: if self.transfer_type == 'S': res", "KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted operation\") self._sending_file", "\"\"\" try: printalert(\"Waiting for sender\") self.socket.listen(1) self._sender_socket, addrport = self.socket.accept()", "hostaddr = recver_socket.accept() send_tok = sender_socket.recv(6) if send_tok == self._sender_token:", "rate = d_value/d_time if d_time else float('inf') eta_s = round((d_max_value", "def byte_rescale(data, precision=1): scale = ['B', 'KB', 'MB', 'GB', 'TB',", "start with open(fpath, 'br') as f: f.seek(start) while self._sending_file and", "\"\"\" app_dirname = dirname(__file__) appfiles_dirname = join(app_dirname, 'Flyter') if not", "a file with a single worker.\"\"\" if not self.param_set: return", "filepath, file_size): \"\"\" Send a file with a single worker.", "Parameters ---------- hostname : str The name of the subdirectory", "range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with open(wpath, 'br')", "except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False return printerror(\"User aborted operation\")", "displaying progress bars. 
Parameters ---------- max_value : int, float The", "self._progress_bar.stop() return printerror(f\"Error while sending file\") else: self._sending_file = False", "printerror(f\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() self._sending_file = False return", "The target receiver's main TCP port. filepath : str The", "b'\\x06' # ACK except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Sending", "bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn) self._sender_socket.send(b'\\x06') # ACK", "threading import Thread from time import time from warnings import", "int The receiver's host port to listen on. workers :", "return printerror(\"Sender not yet set with parameters\") try: self._recving_file =", "if isinstance(self.__dict__.get('socket'), socket): self.socket.close() if self.__dict__.get('workers'): for w in self.workers:", "max_value, length=50): self.max_value = max_value self.current_val = 0 self.length =", "if hostname: host_storage_dirname = join(storage_dirname, hostname) if not exists(host_storage_dirname): mkdir(host_storage_dirname)", "thread self._progress_bar = ProgressBar(fsize, 40) self._progress_bar.start() def progress_thread(): try: #", "subparsers = parser.add_subparsers( dest=\"action\", help=\"The action to be performed\" )", "wp = b''.join(wp) headers = b''.join([len_hn, hn, tok, tr_type, len_wp,", "self._progress_bar.stop() self._sending_file = False return printerror(f\"Sending file was unsuccessful\") else:", "res = self._send_s(fpath, fsize) elif self._transfer_type == 'M': res =", "in range(num_workers) ] if num_workers > 1 else [] if", "the split-up file to be sent. \"\"\" if not self.param_set:", "= timedelta(seconds=eta_s) if eta_s is not None else '?' 
clear_line", "---------- max_value : int, float The upper limit of the", "if num_workers > 1 else [] self._sender_socket = None self._sender_hostname", "fsize = stat(fpath).st_size fsizes = [fsize//num_w for w in range(num_w)]", "to be used during transmission. \"\"\" @staticmethod def storage_dir(hostname=None): \"\"\"", "sent. \"\"\" sender = FlyterSender(ip_address, port) sender.recv_param_set() return sender.send_file(filepath) def", "self.socket.recv(1) == b'\\x06' # ACK except AssertionError: return printerror(\"Receiver rejected", "= False def __del__(self): if isinstance(self.socket, socket): self.socket.close() def _send_s(self,", "open(wpath, 'br') as temp: packet = True while packet: packet", "None self._recver_token = None self._transfer_type = None self._worker_ports = None", "Parameterss ---------- recver_ip : str The IP address of the", "[int_to_bytes_s(port) for port in self.worker_ports] wp = b''.join(wp) headers =", "return printerror(\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't access", "value : int, float Added progress value. \"\"\" if self.stopped:", "the current progress.\"\"\" if self.stopped: return d_value = self.current_val -", "False def __del__(self): if isinstance(self.__dict__.get('socket'), socket): self.socket.close() if self.__dict__.get('workers'): for", "Note: Receives from FlyterSender instances. 
Parameters ---------- host_ip : str", "copies of the Software, and to permit persons to whom", "return self.current_val >= self.max_value or self.stopped def start(self): \"\"\"Start the", "\"\"\" if self.stopped: return self.current_val += value def display(self): \"\"\"Display", "printerror(\"Error during handshake\") try: hn = pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn))", "elif self._transfer_type == 'M': res = self._send_m(fpath, fsizes) assert self.socket.recv(1)", "to deal in the Software without restriction, including without limitation", "printerror('Operation timed out') except: return printerror(\"Error during handshake\") try: len_hn", "yet set with parameters\") def threadfunc(worker_num, fpath): self._workers_active += 1", "'br') as f: while self._sending_file and fs: packet = f.read(self._packet_size)", "b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\" [ {fname} ]\") self.socket.send(headers) print(\"Waiting", "now receiving file ({ProgressBar.byte_rescale(fs_all)})\") # Progress bar thread self._progress_bar =", "error\") while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._recving_file =", "Display until file is sent while not self._progress_bar.done: self._progress_bar.display() except:", "printerror(\"No sender available\") except: return printerror(\"Error while waiting for sender\")", "unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for w in", "fsizes[-1] += fsize - sum(fsizes) fn = pack_str(fname) len_fn =", "port to listen on\") recv_parser.add_argument('-w', '--workers', type=int, default=1, help=\"TCP port", "\"\"\"Print an alert message.\"\"\" global FROMTERMINAL print(f'[!] 
{alert}') def int_to_bytes_s(integer):", "return printerror(\"Receiver rejected handshake\") except timeout: return printerror('Operation timed out')", ": int The main TCP port to be used. num_workers", "else: res = None except: self._progress_bar.stop() self._recving_file = False return", "use. \"\"\" receiver = FlyterReciever(host_ip_address, port, workers) receiver.send_param_set() receiver.recv_file() if", "printerror(\"Sender not yet set with parameters\") def threadfunc(worker_num, fpath): self._workers_active", "bar.\"\"\" self.stopped = True def add_progress(self, value): \"\"\" Count new", "res return bytes.fromhex(res) def bytes_to_int_s(byteseq): \"\"\"Convert byte sequence to 16", "filepath to the file to be sent. file_sizes : list(int)", "= True args = parser.parse_args() if args.action == \"send\": send(args.ip,", "sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) sender_socket.send(b'\\x06') # ACK except", "while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False", "specific sub-command.\" ) subparsers = parser.add_subparsers( dest=\"action\", help=\"The action to", "limitation the rights to use, copy, modify, merge, publish, distribute,", "subject to the following conditions: The above copyright notice and", "The name of the subdirectory where that host's sent files", "initializing sockets') self.param_set = False def __del__(self): if isinstance(self.__dict__.get('socket'), socket):", "fn = pack_str(fname) len_fn = int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s) for", "True path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs = self._sender_filesizes[0]", "--help' to read about a specific sub-command.\" ) subparsers =", "used during data transmissions. \"\"\" try: self.socket.connect((self.recver_ip, self.main_port)) except error:", "file_size): \"\"\" Send a file with a single worker. 
Parameters", "except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while receiving headers\") print(f\"[", "if not exists(storage_dirname): mkdir(storage_dirname) if hostname: host_storage_dirname = join(storage_dirname, hostname)", "during transmission. \"\"\" @staticmethod def storage_dir(hostname=None): \"\"\" Return the path", "r_bytes = round(data/pow(2, 10*p), precision) return f\"{r_bytes}{scale[p]}\" def __init__(self, max_value,", "except: self._progress_bar.stop() self._recving_file = False return printerror(\"Receiving file was unsuccessful\")", "a file from sender on the same network. Parameters ----------", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A", "sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num]) ) sock.send(self.token) assert sock.recv(1) == b'\\x06'", "except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while saving file\") else:", "exist\") # Headers try: tok = self.token num_w = max(1,", "= None self._recver_token = None self._transfer_type = None self._worker_ports =", "as sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num]) ) sock.send(self.token) assert sock.recv(1) ==", "Progress bar thread self._progress_bar = ProgressBar(fsize, 40) self._progress_bar.start() def progress_thread():", "Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "The upper limit of the progress bar. length : :obj:`int`,", "try: # Build the file path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename", "max_value self.current_val = 0 self.length = length self.rate = None", "to send file\") while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop()", "# NAK return printerror(\"Error getting parameters from receiver\") else: self.param_set", ": str The receiver's host IP address. port : int", "+ file_sizes[w] ), ).start() size += file_sizes[w] except FileNotFoundError: return", "SOFTWARE. 
\"\"\" from argparse import ArgumentParser from base64 import b64encode", "fs: end_size = f.tell() + self._packet_size size = (self._packet_size -", "settings used during data transmissions. \"\"\" try: printalert(\"Waiting for sender\")", "to listen on. workers : :obj:`int`, optional The number of", "required=True, help=\"Path to the file to be sent\") recv_parser.add_argument('-i', '--ip',", "Thread( target=threadfunc, args=( w, filepath, size, size + file_sizes[w] ),", "__init__(self, max_value, length=50): self.max_value = max_value self.current_val = 0 self.length", "file\") except PermissionError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access", "= dirname(__file__) appfiles_dirname = join(app_dirname, 'Flyter') if not exists(appfiles_dirname): mkdir(appfiles_dirname)", "to read about a specific sub-command.\" ) subparsers = parser.add_subparsers(", "print(f'[!] {alert}') def int_to_bytes_s(integer): \"\"\"Convert 16 - bit integer to", "random_port(host): \"\"\"Return a random available TCP port.\"\"\" while True: port", "rate by using multiple workers. \"\"\" if not self.param_set: return", "else: sender_socket.send(b'\\x15') # NAK fs = self._sender_filesizes[worker_num] with open(fpath, 'bw')", "= None try: if self._transfer_type == 'S': res = self._send_s(fpath,", "warn from sys import argv, exit, version_info if version_info <", "and send Receiver's parameter settings. Used to set Sender's parameter", "from sys import argv, exit, version_info if version_info < (3,", "= bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn) self._sender_socket.send(b'\\x06') #", "for port in self.worker_ports] wp = b''.join(wp) headers = b''.join([len_hn,", "Receives from FlyterSender instances. 
Parameters ---------- host_ip : str The", "due to permission error\") except timeout: self._progress_bar.stop() return printerror(\"Operation timed", "parser = ArgumentParser( prog=\"Flyter\", epilog=\"See '<command> --help' to read about", "be used. num_workers : int The amount of workers to", "len(self._worker_ports)) fpath = filepath.replace(altsep, sep) fname = fpath.split(sep)[-1] fsize =", "same network. Parameters ---------- host_ip_address : str The receiver's host", "= pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port) for port", "not None else '?' clear_line = \" \"*(get_terminal_size().columns - 1)", "not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\") return res def", "listen on. workers : :obj:`int`, optional The number of workers", "to 32 - but integer for unpacking.\"\"\" res = bytes.hex(byteseq)", "file transfer\") except error: return printerror(\"Sender isn't available anymore\") except:", "self._packet_size size = (self._packet_size - max(0, end_size - end)) packet", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "= self.token num_w = max(1, len(self._worker_ports)) fpath = filepath.replace(altsep, sep)", "PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access file due to permission error\")", "data else 0 r_bytes = round(data/pow(2, 10*p), precision) return f\"{r_bytes}{scale[p]}\"", "unpacking.\"\"\" res = bytes.hex(byteseq) res = int(res, 16) return htonl(res)", "receiver's IP address\") send_parser.add_argument('-p', '--port', type=int, required=True, help=\"Target receiver's TCP", "packet = f.read(size) if not packet: break sock.send(packet) assert sock.recv(1)", "prog_bar = '█'*prog + '▌'*extra spaces = ' '*(self.length -", "yet set with receiver's parameters\") # Headers try: tok =", "[int_to_bytes_l(s) for s in fsizes] fs = b''.join(fs) len_fs =", "receiving processes. 
Note: Receives from FlyterSender instances. Parameters ---------- host_ip", "sending file\") else: self._sending_file = False return True def _send_m(self,", "self._sending_file = False return True def send_file(self, filepath): \"\"\" Send", "b''.join(fs) len_fs = int_to_bytes_s(num_w) headers = b''.join([tok, len_fn, fn, len_fs,", "return self.current_val += value def display(self): \"\"\"Display the current progress.\"\"\"", "res = None try: if self._transfer_type == 'S': res =", "return port def printerror(errormsg): \"\"\"Print an error message.\"\"\" global FROMTERMINAL", "return printerror(\"User aborted operation\") self._sending_file = False return True def", "sender_socket.send(b'\\x15') # NAK fs = self._sender_filesizes[worker_num] with open(fpath, 'bw') as", "progress value. \"\"\" if self.stopped: return self.current_val += value def", "f\"is now receiving file ({ProgressBar.byte_rescale(fs_all)})\") # Progress bar thread self._progress_bar", "True args = parser.parse_args() if args.action == \"send\": send(args.ip, args.port,", "number\") send_parser.add_argument('-f', '--file', required=True, help=\"Path to the file to be", "= None try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error", "# ACK except KeyboardInterrupt: return printerror(\"User aborted operation\") except AssertionError:", "= unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for w", "] \" f\"is now sending file ({ProgressBar.byte_rescale(fsize)})\") # Progress bar", "set Sender's parameter settings used during data transmissions. 
\"\"\" try:", "\" f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\", end=\"\\r\") # Flyter Classes class", "doesn't exist\") self._sending_file = True try: fs = file_size with", "the following conditions: The above copyright notice and this permission", "sock.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF", "worker.\"\"\" if not self.param_set: return printerror(\"Sender not yet set with", "return printerror(\"Operation timed out\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading file", "# ACK # Wait for progress bar while not self._progress_bar.done:", "receiver. \"\"\" DEFAULT_PACKET_SIZE = 1024 def __init__(self, recver_ip, main_port): self.recver_ip", "\"\"\" @staticmethod def storage_dir(hostname=None): \"\"\" Return the path of the", "[] if self.workers: for w in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60)", "\"\"\"Convert byte sequence to 16 - but integer for unpacking.\"\"\"", "{gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now sending file ({ProgressBar.byte_rescale(fsize)})\") # Progress", "but integer for unpacking.\"\"\" res = bytes.hex(byteseq) res = int(res,", "except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False printerror(\"User aborted operation\") self._recving_file", "available\") except: return printerror(\"Error while waiting for sender\") try: len_sender_hn", "\"\"\" Receive a file from sender on the same network.", "timeout: return printerror(\"Operation timed out\") except: return printerror(\"Error during handshake\")", "receiver\") else: self.param_set = True class FlyterReciever: \"\"\" Handles Flyter", "# Display until file is received while not self._progress_bar.done: self._progress_bar.display()", "FROMTERMINAL print(f'[!] 
{alert}') def int_to_bytes_s(integer): \"\"\"Convert 16 - bit integer", "= True class FlyterReciever: \"\"\" Handles Flyter file receiving processes.", "bit integer to bytes for packing.\"\"\" res = ntohs(integer) res", "= self.length*round(per) > prog prog_bar = '█'*prog + '▌'*extra spaces", "= 1024 def __init__(self, recver_ip, main_port): self.recver_ip = recver_ip self.main_port", "current progress.\"\"\" if self.stopped: return d_value = self.current_val - self.start_value", "return printerror(\"Error while sending headers to sender\") else: self.param_set =", "len(packet) except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted", "socket): self.socket.close() if self.__dict__.get('workers'): for w in self.workers: w.close() def", "optional The number of workers to use. \"\"\" receiver =", "# ACK except timeout: return printerror(\"Operation timed out\") except: return", "else: self.param_set = True # Simplified Functions def send(ip_address, port,", "the receiver. \"\"\" DEFAULT_PACKET_SIZE = 1024 def __init__(self, recver_ip, main_port):", "16 - but integer for unpacking.\"\"\" res = bytes.hex(byteseq) res", "a string into a byte sequence.\"\"\" return string.encode() def unpack_str(byteseq):", "file to be sent. 
\"\"\" if not self.param_set: return printerror(\"Not", "False return printerror(f\"Error while sending file\") finally: self._workers_active -= 1", "num_workers > 1 else [] if self.workers: for w in", "headers\") try: b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\" [", "self.socket.listen(1) self._sender_socket, addrport = self.socket.accept() except timeout: return printerror(\"No sender", "d_time = time() - self.start_time per = d_value/d_max_value prog =", "in range(len_fs)] fs_all = sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants", "import time from warnings import warn from sys import argv,", "-= len(packet) except AssertionError: self._progress_bar.stop() return printerror(\"Receiver rejected packet\") except", "= <PASSWORD>_<PASSWORD>(6) self._recver_hostname = None self._recver_token = None self._transfer_type =", "up transmission rate by using multiple workers. \"\"\" if not", "with a single worker.\"\"\" if not self.param_set: return printerror(\"Sender not", "unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs = [bytes_to_int_l(self._sender_socket.recv(4)) for s in", "= [fsize//num_w for w in range(num_w)] fsizes[-1] += fsize -", "len_fn = int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s) for s in fsizes]", "deleted\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access storage directory\") except", "bar. \"\"\" @staticmethod def byte_rescale(data, precision=1): scale = ['B', 'KB',", "return printerror(\"Can't connect to \" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn = pack_str(gethostname())", ": int The receiver's host port to listen on. workers", "return printerror('Operation timed out') except: return printerror(\"Error during handshake\") try:", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "that host's sent files are stored. 
\"\"\" app_dirname = dirname(__file__)", "receiving try: if self.transfer_type == 'S': res = self._recv_s() elif", "f\"{r_bytes}{scale[p]}\" def __init__(self, max_value, length=50): self.max_value = max_value self.current_val =", "sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to send: {fn} \"", "parameters from receiver\") else: self.param_set = True class FlyterReciever: \"\"\"", "headers\") try: self._sender_socket.send(headers) assert self._sender_socket.recv(1) == b'\\x06' # ACK except:", ").start() size += file_sizes[w] except FileNotFoundError: return printerror(\"Couldn't access file\")", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "workers. \"\"\" if not self.param_set: return printerror(\"Sender not yet set", "wpath), ).start() except FileNotFoundError: return printerror(\"Couldn't access file\") except PermissionError:", "'bw') as f: while self._recving_file and f.writable() and fs: packet", "fpath = filepath.replace(altsep, sep) fname = fpath.split(sep)[-1] fsize = stat(fpath).st_size", "- but integer for unpacking.\"\"\" res = bytes.hex(byteseq) res =", "return True def _send_m(self, filepath, file_sizes): \"\"\" Send a file", "end_size - end)) packet = f.read(size) if not packet: break", "from os import altsep, sep, \\ mkdir, stat, unlink from", "printerror(\"Error while receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "= False # Utility Functions def random_port(host): \"\"\"Return a random", "+= 1 try: with socket(AF_INET, SOCK_STREAM) as sock: sock.connect( (self.recver_ip,", "printerror(\"User aborted operation\") except timeout: self._progress_bar.stop() self._recving_file = False return", "self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error initializing sockets') self.param_set = False", "> 1 else [] if self.workers: for w in 
range(num_workers):", "= ntohs(integer) res = hex(res)[2:] res = '0'*(len(res) % 2)", "sender_hn])) assert self.socket.recv(1) == b'\\x06' # ACK except AssertionError: return", "aborted operation\") self._recving_file = False try: # Build the file", "str The IP address of the receiver. main_port : int", "out\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading file has been deleted\")", "storage directory doesn't exist, creates it first. Parameters ---------- hostname", "= False try: # Build the file path = join(", "warn('[!] Some features are not be compatible with the version", "accept file\") assert self.socket.recv(1) == b'\\x06' # ACK except KeyboardInterrupt:", "TCP port of the receiver. \"\"\" DEFAULT_PACKET_SIZE = 1024 def", "random_port(self.host_ip) for w in range(num_workers) ] if num_workers > 1", "int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port) for port in self.worker_ports] wp =", "int(log(data, 2)/10) if data else 0 r_bytes = round(data/pow(2, 10*p),", "\" \"error\") except timeout: self._progress_bar.stop() self._sending_file = False return printerror(\"Operation", "except: self._progress_bar.stop() self._recving_file = False return printerror(\"Error while receiving file\")", "file with a single worker. Parameters ---------- filepath : str", "return printerror(\"Error during handshake\") try: len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname =", "send_parser.add_argument('-i', '--ip', required=True, help=\"Target receiver's IP address\") send_parser.add_argument('-p', '--port', type=int,", "progress thread\") Thread(target=progress_thread).start() # Start sending res = None try:", "the file to be sent. 
file_sizes : list(int) The sizes", "main_port, num_workers): self.host_ip = host_ip self.main_port = main_port self.token =", "w in range(len(self.worker_ports)): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread(", "self._transfer_type = unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2)) for", "dirname, exists, join from random import randint from secrets import", "- self.start_time per = d_value/d_max_value prog = int(self.length*per) extra =", "if not packet: break sock.send(packet) assert sock.recv(1) == b'\\x06' #", "'PB'] p = int(log(data, 2)/10) if data else 0 r_bytes", "error: return printerror(\"Error getting connected with socket\") except: self.socket.send(b'\\x15') #", "= temp.read(self._packet_size) output.write(packet) # Clear the contents of the temp", "exists(filepath): return printerror(\"File doesn't exist\") # Headers try: tok =", "True try: fs = file_size with open(filepath, 'br') as f:", "or self.stopped def start(self): \"\"\"Start the progress bar.\"\"\" self.stopped =", "elif self.transfer_type == 'M': res = self._recv_m() else: res =", "d_max_value = self.max_value - self.start_value d_time = time() - self.start_time", "def storage_dir(hostname=None): \"\"\" Return the path of the storage dir", "waiting for sender\") try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn)", "for w in range(len(self.worker_ports)): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" )", "= <PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S' if num_workers == 1 else", "= end - start with open(fpath, 'br') as f: f.seek(start)", "processes. Note: Receives from FlyterSender instances. 
Parameters ---------- host_ip :", "d_value = self.current_val - self.start_value d_max_value = self.max_value - self.start_value", "printerror(\"Operation timed out\") except: self._progress_bar.stop() return printerror(f\"Error while sending file\")", "Start receiving try: if self.transfer_type == 'S': res = self._recv_s()", "= f.read(self._packet_size) if not packet: break self.socket.send(packet) assert self.socket.recv(1) ==", "for w in range(num_workers): Thread( target=threadfunc, args=( w, filepath, size,", "distribute, sublicense, and/or sell copies of the Software, and to", "b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except AssertionError: self._progress_bar.stop()", "transmission rate by using multiple workers. Parameters ---------- filepath :", "dirname(__file__) appfiles_dirname = join(app_dirname, 'Flyter') if not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname", "stat(fpath).st_size fsizes = [fsize//num_w for w in range(num_w)] fsizes[-1] +=", "to be sent. \"\"\" sender = FlyterSender(ip_address, port) sender.recv_param_set() return", "len_fs = int_to_bytes_s(num_w) headers = b''.join([tok, len_fn, fn, len_fs, fs])", "version of your ' 'python interpreter') FROMTERMINAL = False #", "return res def recv_param_set(self): \"\"\" Receive and unpack Receiver's parameter", "self._sender_token: sender_socket.send(b'\\x06') # ACK else: sender_socket.send(b'\\x15') # NAK fs =", "exists(storage_dirname): mkdir(storage_dirname) if hostname: host_storage_dirname = join(storage_dirname, hostname) if not", "a file with multiple workers. 
Speeds up transmission rate by", "FlyterSender(ip_address, port) sender.recv_param_set() return sender.send_file(filepath) def receive(host_ip_address, port, workers=1): \"\"\"", "rate else \\ None eta = timedelta(seconds=eta_s) if eta_s is", "on\") if len(argv) > 1: FROMTERMINAL = True args =", "-= len(packet) except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User", "new progress. Parameter --------- value : int, float Added progress", "if num_workers == 1 else 'M' self.worker_ports = [ random_port(self.host_ip)", "self.stopped: return d_value = self.current_val - self.start_value d_max_value = self.max_value", "for received files. If storage directory doesn't exist, creates it", "len_fs, fs]) except: return printerror(\"Error while preparing headers\") try: b64_tok", "with multiple workers. Speeds up transmission rate by using multiple", "res = bytes.hex(byteseq) res = int(res, 16) return htonl(res) def", "= self._sender_socket.recv(6) b64_tok = b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn =", "= b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\" [ {fname} ]\") self.socket.send(headers)", "headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now receiving file ({ProgressBar.byte_rescale(fs_all)})\")", "# Wait until receiving file while not self._recving_file: pass #", "= [int_to_bytes_l(s) for s in fsizes] fs = b''.join(fs) len_fs", ">= self.max_value or self.stopped def start(self): \"\"\"Start the progress bar.\"\"\"", "in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except: printerror('Error initializing sockets') self.param_set", "False def __del__(self): if isinstance(self.socket, socket): self.socket.close() def _send_s(self, filepath,", "= randint(10_000, 65536) with socket(AF_INET, SOCK_STREAM) as sock: try: sock.bind((host,", 
"sending file while not self._sending_file: pass # Display until file", "max(1, len(self._worker_ports)) fpath = filepath.replace(altsep, sep) fname = fpath.split(sep)[-1] fsize", "printerror(\"Couldn't access file due to permission \" \"error\") except timeout:", "\\ AF_INET, SOCK_STREAM from threading import Thread from time import", "not self.param_set: return printerror(\"Sender not yet set with parameters\") try:", "ArgumentParser( prog=\"Flyter\", epilog=\"See '<command> --help' to read about a specific", "printerror(\"Operation timed out\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading file has", "permissions\") except error: return printerror(\"Error with sockets\") except: self._sender_socket.send(b'\\x15') #", "int(res, 16) return htonl(res) def pack_str(string): \"\"\"Pack a string into", "self.__dict__.get('workers'): for w in self.workers: w.close() def _recv_s(self): \"\"\"Receive a", "try: self._sender_socket.send(headers) assert self._sender_socket.recv(1) == b'\\x06' # ACK except: return", "] if num_workers > 1 else [] self._sender_socket = None", "self._progress_bar.start() def progress_thread(): try: # Wait until sending file while", "\"\"\" try: self.socket.connect((self.recver_ip, self.main_port)) except error: return printerror(\"Can't connect to", "self._worker_ports = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False self._workers_active", "printerror(\"Receiving file was unsuccessful\") else: self._sender_socket.send(b'\\x06') # ACK # Wait", "not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\") return res def", "while sending file\") finally: self._workers_active -= 1 num_workers = len(self._worker_ports)", "IP address. 
port : int The target receiver's main TCP", "FROMTERMINAL if FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1) exit(-1) exit(-1) exit(-1) else:", "except KeyboardInterrupt: return printerror(\"User aborted operation\") except AssertionError: return printerror(\"Receiver", "= self._send_m(fpath, fsizes) assert self.socket.recv(1) == b'\\x06' # ACK except:", "to do so, subject to the following conditions: The above", "{fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? (y/n) \") if answer.lower() ==", "WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "# NAK return printerror(\"Couldn't save file due to permissions\") except", "the progress bar. \"\"\" @staticmethod def byte_rescale(data, precision=1): scale =", "clear_line = \" \"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}|", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "self._progress_bar.stop() self._recving_file = False return printerror(\"User aborted operation\") except timeout:", "as f: while self._sending_file and fs: packet = f.read(self._packet_size) if", "self.max_value = max_value self.current_val = 0 self.length = length self.rate", "range(num_w)] fsizes[-1] += fsize - sum(fsizes) fn = pack_str(fname) len_fn", "except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() self._sender_token = tok", "argparse import ArgumentParser from base64 import b64encode from datetime import", "ProgressBar(fsize, 40) self._progress_bar.start() def progress_thread(): try: # Wait until sending", "self._sender_socket.send(b'\\x06') # ACK except timeout: return printerror(\"Operation timed out\") except:", "sender.recv_param_set() return sender.send_file(filepath) def receive(host_ip_address, port, workers=1): \"\"\" Receive a", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "set with receiver's parameters\") if not exists(filepath): return 
printerror(\"File doesn't", "]\") self.socket.send(headers) print(\"Waiting for receiver to accept file\") assert self.socket.recv(1)", "thread self._progress_bar = ProgressBar(fs_all, 35) self._progress_bar.start() def progress_thread(): try: #", "except error: continue else: return port def printerror(errormsg): \"\"\"Print an", "self.socket.accept() except timeout: return printerror(\"No sender available\") except: return printerror(\"Error", "b64_tok = b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs", "] if num_workers > 1 else [] if self.workers: for", "int The amount of workers to be used during transmission.", "= '█'*prog + '▌'*extra spaces = ' '*(self.length - (prog", "while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\") return res", "send_param_set(self): \"\"\" Pack and send Receiver's parameter settings. Used to", "- (prog + extra)) rate = d_value/d_time if d_time else", "+ self._packet_size size = (self._packet_size - max(0, end_size - end))", "return printerror(\"User aborted operation\") except AssertionError: self._progress_bar.stop() self._sending_file = False", "are not be compatible with the version of your '", "= False return printerror(\"Operation timed out\") except: self._progress_bar.stop() self._sending_file =", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "= self.current_val def stop(self): \"\"\"Stop the progress bar.\"\"\" self.stopped =", "# Headers try: tok = self._sender_socket.recv(6) b64_tok = b64encode(tok).decode() len_fn", "== 'M': res = self._recv_m() else: res = None except:", "# NAK fs = self._sender_filesizes[worker_num] with open(fpath, 'bw') as f:", "sender\") else: self.param_set = True # Simplified Functions def send(ip_address,", "except PermissionError: self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access 
file", "time() self.start_value = self.current_val def stop(self): \"\"\"Stop the progress bar.\"\"\"", "integer for unpacking.\"\"\" res = bytes.hex(byteseq) res = int(res, 16)", "help=\"Target receiver's TCP port number\") send_parser.add_argument('-f', '--file', required=True, help=\"Path to", "print(\"Waiting for receiver to accept file\") assert self.socket.recv(1) == b'\\x06'", "print(f'\\n[x] {errormsg}') exit(-1) exit(-1) exit(-1) exit(-1) else: warn(errormsg) def printalert(alert):", "Sender's parameter settings used during data transmissions. \"\"\" try: printalert(\"Waiting", "be sent. file_sizes : list(int) The sizes of the split-up", "pack_str(fname) len_fn = int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s) for s in", "out') except: return printerror(\"Error during handshake\") try: len_hn = bytes_to_int_s(self.socket.recv(2))", "len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1) == b'\\x06' #", "copyright notice and this permission notice shall be included in", "sock.send(packet) assert sock.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -=", "Classes class ProgressBar: \"\"\" For displaying progress bars. Parameters ----------", "= False return True def send_file(self, filepath): \"\"\" Send a", "doesn't exist\") # Headers try: tok = self.token num_w =", "hostname : str The name of the subdirectory where that", "Parameters ---------- filepath : str The filepath of the file", ":obj:`int`, optional The length of the progress bar. 
\"\"\" @staticmethod", "precision) return f\"{r_bytes}{scale[p]}\" def __init__(self, max_value, length=50): self.max_value = max_value", "bytes for packing.\"\"\" res = ntohl(integer) res = hex(res)[2:] res", "= recver_socket.accept() send_tok = sender_socket.recv(6) if send_tok == self._sender_token: sender_socket.send(b'\\x06')", "and to permit persons to whom the Software is furnished", "for s in fsizes] fs = b''.join(fs) len_fs = int_to_bytes_s(num_w)", "bar.\"\"\" self.stopped = False self.start_time = time() self.start_value = self.current_val", "if not exists(filepath): return printerror(\"File doesn't exist\") self._sending_file = True", "range(len_fs)] fs_all = sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to", "The receiver's host port to listen on. workers : :obj:`int`,", "now sending file ({ProgressBar.byte_rescale(fsize)})\") # Progress bar thread self._progress_bar =", "False return printerror(\"Error with sockets\") except: self._progress_bar.stop() self._recving_file = False", "wp = [int_to_bytes_s(port) for port in self.worker_ports] wp = b''.join(wp)", "stop(self): \"\"\"Stop the progress bar.\"\"\" self.stopped = True def add_progress(self,", "self.socket.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except", "SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\") return", "to be performed\" ) send_parser = subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\")", "sent while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with progress", "_recv_m(self): \"\"\" Receive a file with multiple workers. 
Speeds up", "sending file\") finally: self._workers_active -= 1 num_workers = len(self._worker_ports) self._sending_file", "- sum(fsizes) fn = pack_str(fname) len_fn = int_to_bytes_s(len(fn)) fs =", "optional The length of the progress bar. \"\"\" @staticmethod def", "res = '0'*(len(res) % 2) + res return bytes.fromhex(res) def", "exit(-1) exit(-1) exit(-1) else: warn(errormsg) def printalert(alert): \"\"\"Print an alert", "socket\") except: self.socket.send(b'\\x15') # NAK return printerror(\"Error getting parameters from", "whom the Software is furnished to do so, subject to", "NAK return printerror(\"Error while receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \"", "time import time from warnings import warn from sys import", "Functions def random_port(host): \"\"\"Return a random available TCP port.\"\"\" while", "printerror('Error initializing sockets') self.param_set = False def __del__(self): if isinstance(self.socket,", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT", "try: with socket(AF_INET, SOCK_STREAM) as sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num]) )", "\"\"\"Stop the progress bar.\"\"\" self.stopped = True def add_progress(self, value):", "if rate else \\ None eta = timedelta(seconds=eta_s) if eta_s", "in all copies or substantial portions of the Software. THE", "self.start_time per = d_value/d_max_value prog = int(self.length*per) extra = self.length*round(per)", "operation\") except AssertionError: self._progress_bar.stop() self._sending_file = False return printerror(f\"Receiver rejected", "\"\"\"Start the progress bar.\"\"\" self.stopped = False self.start_time = time()", "recver_ip : str The IP address of the receiver. main_port", "USE OR OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" from argparse", "sender\") self.socket.listen(1) self._sender_socket, addrport = self.socket.accept() except timeout: return printerror(\"No", "file to be sent\") recv_parser.add_argument('-i', '--ip', required=True, help=\"Host IP address\")", "'S': res = self._recv_s() elif self.transfer_type == 'M': res =", "f\"[{ProgressBar.byte_rescale(rate)}/s] \" f\"ETA: {eta}\", end=\"\\r\") # Flyter Classes class FlyterSender:", "help=\"TCP port to listen on\") recv_parser.add_argument('-w', '--workers', type=int, default=1, help=\"TCP", "of this software and associated documentation files (the \"Software\"), to", "receiver's host IP address. port : int The receiver's host", "port = randint(10_000, 65536) with socket(AF_INET, SOCK_STREAM) as sock: try:", "input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept?", "if not exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname else: return storage_dirname DEFAULT_PACKET_SIZE", "sublicense, and/or sell copies of the Software, and to permit", ": list(int) The sizes of the split-up file to be", "- max(0, end_size - end)) packet = f.read(size) if not", "fsize - sum(fsizes) fn = pack_str(fname) len_fn = int_to_bytes_s(len(fn)) fs", "return printerror(\"Operation timed out\") except: return printerror(\"Error during handshake\") try:", "is received while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with", "a file.\"\"\" if not self.param_set: return printerror(\"Not yet set with", "= ' '*(self.length - (prog + extra)) rate = d_value/d_time", "receiver's TCP port number\") send_parser.add_argument('-f', '--file', required=True, help=\"Path to the", "= time() self.start_value = self.current_val def stop(self): \"\"\"Stop the progress", "FlyterReciever(host_ip_address, port, workers) receiver.send_param_set() receiver.recv_file() if __name__ == '__main__': parser", "to sender\") 
else: self.param_set = True # Simplified Functions def", "self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) self._sender_socket.send(b'\\x06') # ACK except", "to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now sending file", "return printerror(\"Error while saving file\") else: return True def recv_file(self):", "exists(filepath): printerror(\"File doesn't exist\") def threadfunc(worker_num, fpath, start, end): self._workers_active", "= fs # Start receiving try: if self.transfer_type == 'S':", "= self.max_value - self.start_value d_time = time() - self.start_time per", "this permission notice shall be included in all copies or", "operation\") self._sending_file = False return True def send_file(self, filepath): \"\"\"", "timeout: self._progress_bar.stop() self._sending_file = False return printerror(\"Operation timed out\") except:", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "set with receiver's parameters\") # Headers try: tok = self._sender_socket.recv(6)", "self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() #", "be used. main_port : int The main TCP port to", "access file due to permission error\") while self._workers_active: try: pass", "htonl(res) def pack_str(string): \"\"\"Pack a string into a byte sequence.\"\"\"", "== b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except KeyboardInterrupt:", "1 num_workers = len(self.workers) self._recving_file = True try: for w", "address\") send_parser.add_argument('-p', '--port', type=int, required=True, help=\"Target receiver's TCP port number\")", "port to listen on\") if len(argv) > 1: FROMTERMINAL =", "Sends to FlyterReceiver instances. 
Parameterss ---------- recver_ip : str The", "self.rate = None self.start_time = None self.start_value = None self.stopped", "htons, htonl, \\ gethostname, \\ AF_INET, SOCK_STREAM from threading import", "FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False self._workers_active = 0 self._progress_bar = ProgressBar(None)", "multiple workers. \"\"\" if not self.param_set: return printerror(\"Sender not yet", "isinstance(self.socket, socket): self.socket.close() def _send_s(self, filepath, file_size): \"\"\" Send a", "self._recving_file = False return printerror(\"User aborted operation\") except timeout: self._progress_bar.stop()", "printerror(\"File doesn't exist\") def threadfunc(worker_num, fpath, start, end): self._workers_active +=", "self.stopped def start(self): \"\"\"Start the progress bar.\"\"\" self.stopped = False", "= b''.join([len_hn, hn, tok, tr_type, len_wp, wp]) except: return printerror(\"Error", "'--ip', required=True, help=\"Target receiver's IP address\") send_parser.add_argument('-p', '--port', type=int, required=True,", "@staticmethod def byte_rescale(data, precision=1): scale = ['B', 'KB', 'MB', 'GB',", "str The Host IP address to be used. main_port :", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING", "KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "file from sender on the same network. Parameters ---------- host_ip_address", "= int(self.length*per) extra = self.length*round(per) > prog prog_bar = '█'*prog", "exit(-1) exit(-1) exit(-1) exit(-1) else: warn(errormsg) def printalert(alert): \"\"\"Print an", "epilog=\"See '<command> --help' to read about a specific sub-command.\" )", "self._recving_file = False return True def _recv_m(self): \"\"\" Receive a", "warn(errormsg) def printalert(alert): \"\"\"Print an alert message.\"\"\" global FROMTERMINAL print(f'[!]", "def send_file(self, filepath): \"\"\" Send a file. Parameters ---------- filepath", "version_info < (3, 6): warn('[!] 
Some features are not be", "\" f\"is now receiving file ({ProgressBar.byte_rescale(fs_all)})\") # Progress bar thread", "Build the file path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) with", "performed\" ) send_parser = subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip',", "sent. file_sizes : list(int) The sizes of the split-up file", "The Host IP address to be used. main_port : int", "shall be included in all copies or substantial portions of", "fs = [bytes_to_int_l(self._sender_socket.recv(4)) for s in range(len_fs)] fs_all = sum(fs)", "NAK return printerror(\"Error getting parameters from receiver\") else: self.param_set =", "1 else [] self._sender_socket = None self._sender_hostname = None self._sender_token", "FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN", "open(path, 'bw') as output: for w in range(num_workers): wpath =", "self._progress_bar.add_progress(len(packet)) fs -= len(packet) except AssertionError: self._progress_bar.stop() return printerror(\"Receiver rejected", "and unpack Receiver's parameter settings. 
Used to set Sender's parameter", "printerror(\"Error with progress thread\") Thread(target=progress_thread).start() self._sender_token = tok self._sender_filename =", "= self._recv_s() elif self.transfer_type == 'M': res = self._recv_m() else:", "[] self._sender_socket = None self._sender_hostname = None self._sender_token = None", "tok = self._sender_socket.recv(6) b64_tok = b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn", "socket(AF_INET, SOCK_STREAM) as sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num]) ) sock.send(self.token) assert", "starting to send file\") while self._workers_active: try: pass except KeyboardInterrupt:", "try: # Wait until receiving file while not self._recving_file: pass", "handshake\") try: hn = pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn)) tok =", "d_value/d_time if d_time else float('inf') eta_s = round((d_max_value - d_value)/rate)", "= False return True def _recv_m(self): \"\"\" Receive a file", "self.param_set: return printerror(\"Not yet set with receiver's parameters\") if not", "[ {fname} ]\") self.socket.send(headers) print(\"Waiting for receiver to accept file\")", "def __init__(self, host_ip, main_port, num_workers): self.host_ip = host_ip self.main_port =", "number of workers to use. 
\"\"\" receiver = FlyterReciever(host_ip_address, port,", "file\") assert self.socket.recv(1) == b'\\x06' # ACK except KeyboardInterrupt: return", "range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath) except PermissionError:", "try: pass except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User", "'GB', 'TB', 'PB'] p = int(log(data, 2)/10) if data else", "progress thread\") Thread(target=progress_thread).start() self._sender_token = tok self._sender_filename = fn self._sender_filesizes", "and fs: packet = sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet)", "file with a single worker.\"\"\" if not self.param_set: return printerror(\"Sender", "printerror(\"Error during handshake\") try: len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn))", "listen on\") if len(argv) > 1: FROMTERMINAL = True args", "except: self.socket.send(b'\\x15') # NAK return printerror(\"Error getting parameters from receiver\")", "are stored. \"\"\" app_dirname = dirname(__file__) appfiles_dirname = join(app_dirname, 'Flyter')", "start, end): self._workers_active += 1 try: with socket(AF_INET, SOCK_STREAM) as", "\"\"\"Print an error message.\"\"\" global FROMTERMINAL if FROMTERMINAL: print(f'\\n[x] {errormsg}')", "% 2) + res return bytes.fromhex(res) def bytes_to_int_s(byteseq): \"\"\"Convert byte", "files are stored. \"\"\" app_dirname = dirname(__file__) appfiles_dirname = join(app_dirname,", "bytes_to_int_s(self._sender_socket.recv(2)) fs = [bytes_to_int_l(self._sender_socket.recv(4)) for s in range(len_fs)] fs_all =", "OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE", "limit of the progress bar. 
length : :obj:`int`, optional The", "'Received Files') if not exists(storage_dirname): mkdir(storage_dirname) if hostname: host_storage_dirname =", "if len(argv) > 1: FROMTERMINAL = True args = parser.parse_args()", "self._sender_socket.recv(1) == b'\\x06' # ACK except: return printerror(\"Error while sending", "the Software is furnished to do so, subject to the", "join(app_dirname, 'Flyter') if not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname, 'Received", "bytes.hex(byteseq) res = int(res, 16) return htonl(res) def pack_str(string): \"\"\"Pack", "file sending processes. Note: Sends to FlyterReceiver instances. Parameterss ----------", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.", "printerror(\"Error while receiving file\") finally: self._workers_active -= 1 num_workers =", "to the file to be sent\") recv_parser.add_argument('-i', '--ip', required=True, help=\"Host", "self._progress_bar.stop() self._sending_file = False return printerror(\"Couldn't access file\") except PermissionError:", "by using multiple workers. Parameters ---------- filepath : str The", "wants to send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? (y/n) \")", "warnings import warn from sys import argv, exit, version_info if", "'M': res = self._send_m(fpath, fsizes) assert self.socket.recv(1) == b'\\x06' #", "not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname, 'Received Files') if not", "# ACK except AssertionError: return printerror(\"Receiver rejected handshake\") except timeout:", "Utility Classes class ProgressBar: \"\"\" For displaying progress bars. Parameters", "with open(filepath, 'br') as f: while self._sending_file and fs: packet", "type=int, required=True, help=\"TCP port to listen on\") recv_parser.add_argument('-w', '--workers', type=int,", "Flyter file sending processes. Note: Sends to FlyterReceiver instances. 
Parameterss", "__version__ = (0, 0, 0) __author__ = \"CryptoNyxz\" __license__ =", "32 - but integer to bytes for packing.\"\"\" res =", "TCP port number\") send_parser.add_argument('-f', '--file', required=True, help=\"Path to the file", "to bytes for packing.\"\"\" res = ntohs(integer) res = hex(res)[2:]", "mkdir(host_storage_dirname) return host_storage_dirname else: return storage_dirname DEFAULT_PACKET_SIZE = 512 def", "operation\") self._recving_file = False try: # Build the file path", "with receiver's parameters\") if not exists(filepath): printerror(\"File doesn't exist\") def", "on the same network. Parameters ---------- ip_address : str The", "str The receiver's host IP address. port : int The", "self.param_set: return printerror(\"Not yet set with receiver's parameters\") # Headers", "Receive a file from sender on the same network. Parameters", "timed out\") except error: self._progress_bar.stop() self._recving_file = False return printerror(\"Error", "subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True, help=\"Target receiver's IP address\") send_parser.add_argument('-p', '--port',", "False return printerror(f\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() self._sending_file =", "pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port) for port in", "except AssertionError: self._progress_bar.stop() return printerror(\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop()", "unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2))", "The number of workers to use. 
\"\"\" receiver = FlyterReciever(host_ip_address,", "progress bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\")", "\\ mkdir, stat, unlink from os.path import dirname, exists, join", "from warnings import warn from sys import argv, exit, version_info", "(0, 0, 0) __author__ = \"CryptoNyxz\" __license__ = \"\"\" MIT", "NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "= False @property def done(self): \"\"\"Return if already finished.\"\"\" return", "self._sending_file and fs: end_size = f.tell() + self._packet_size size =", "== b'\\x06' # ACK except KeyboardInterrupt: return printerror(\"User aborted operation\")", "printerror(\"Couldn't access storage directory\") except error: self._progress_bar.stop() return printerror(\"Error with", "finished.\"\"\" return self.current_val >= self.max_value or self.stopped def start(self): \"\"\"Start", "encryption. \"\"\" __version__ = (0, 0, 0) __author__ = \"CryptoNyxz\"", "return printerror(f\"Sending file was unsuccessful\") else: # Wait for progress", "sock.send(self.token) assert sock.recv(1) == b'\\x06' # ACK fs = end", "used during transmission. \"\"\" @staticmethod def storage_dir(hostname=None): \"\"\" Return the", "help=\"Path to the file to be sent\") recv_parser.add_argument('-i', '--ip', required=True,", "storage_dir(hostname=None): \"\"\" Return the path of the storage dir for", "return byteseq.decode() # Utility Classes class ProgressBar: \"\"\" For displaying", "self._sending_file and fs: packet = f.read(self._packet_size) if not packet: break", "'0'*(len(res) % 2) + res return bytes.fromhex(res) def bytes_to_int_s(byteseq): \"\"\"Convert", "d_value)/rate) if rate else \\ None eta = timedelta(seconds=eta_s) if", "settings. Used to set Sender's parameter settings used during data", "to the file to be sent. 
\"\"\" if not self.param_set:", "# Display until file is sent while not self._progress_bar.done: self._progress_bar.display()", "Permission is hereby granted, free of charge, to any person", "None self.start_time = None self.start_value = None self.stopped = False", "The IP address of the receiver. main_port : int The", "Send a file. Parameters ---------- filepath : str The filepath", "Flyter Classes class FlyterSender: \"\"\" Handles Flyter file sending processes.", "assert self.socket.recv(1) == b'\\x06' # ACK except AssertionError: return printerror(\"Receiver", "= FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False self._workers_active = 0 self._progress_bar =", "printalert(\"Rejected file transfer\") except error: return printerror(\"Sender isn't available anymore\")", "wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with open(wpath, 'br') as", "= self._send_s(fpath, fsize) elif self._transfer_type == 'M': res = self._send_m(fpath,", "sequence.\"\"\" return string.encode() def unpack_str(byteseq): \"\"\"Unpack a byte sequence into", "dir for received files. 
If storage directory doesn't exist, creates", "self.token tr_type = pack_str(self.transfer_type) len_wp = int_to_bytes_s(len(self.worker_ports)) wp = [int_to_bytes_s(port)", "Exception: return printerror(\"Error while sending headers to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()}", "= host_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type =", "' 'python interpreter') FROMTERMINAL = False # Utility Functions def", "printerror(\"Operation timed out\") except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Error", "printerror(\"File doesn't exist\") self._sending_file = True try: fs = file_size", "fs: packet = sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) sender_socket.send(b'\\x06')", "FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) with open(wpath, 'br') as temp: packet =", "w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath)", "file to be sent. 
\"\"\" sender = FlyterSender(ip_address, port) sender.recv_param_set()", "prog=\"Flyter\", epilog=\"See '<command> --help' to read about a specific sub-command.\"", "+= value def display(self): \"\"\"Display the current progress.\"\"\" if self.stopped:", "bytes.hex(byteseq) res = int(res, 16) return htons(res) def int_to_bytes_l(integer): \"\"\"Convert", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "return printerror(\"User aborted operation\") except AssertionError: return printerror(\"Receiver rejected\") except", "break sock.send(packet) assert sock.recv(1) == b'\\x06' # ACK self._progress_bar.add_progress(len(packet)) fs", "ArgumentParser from base64 import b64encode from datetime import timedelta from", "yet set with receiver's parameters\") if not exists(filepath): printerror(\"File doesn't", "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "while receiving file\") finally: self._workers_active -= 1 num_workers = len(self.workers)", "self.socket.recv(1) == b'\\x06' # ACK except: self._progress_bar.stop() self._sending_file = False", "= fn self._sender_filesizes = fs # Start receiving try: if", "return htons(res) def int_to_bytes_l(integer): \"\"\"Convert 32 - but integer to", "try: tok = self._sender_socket.recv(6) b64_tok = b64encode(tok).decode() len_fn = bytes_to_int_s(self._sender_socket.recv(2))", "self.start_value = None self.stopped = False @property def done(self): \"\"\"Return", "from datetime import timedelta from math import log from os", "filepath : str The filepath to the file to be", "IN THE SOFTWARE. \"\"\" from argparse import ArgumentParser from base64", "= socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error initializing sockets') self.param_set =", "of the progress bar. 
length : :obj:`int`, optional The length", "w in self.workers: w.close() def _recv_s(self): \"\"\"Receive a file with", "f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc, args=(w, wpath), ).start() except FileNotFoundError: return", "target=threadfunc, args=(w, wpath), ).start() except FileNotFoundError: return printerror(\"Couldn't access file\")", "packet = temp.read(self._packet_size) output.write(packet) # Clear the contents of the", "port of the receiver. \"\"\" DEFAULT_PACKET_SIZE = 1024 def __init__(self,", "args.file) elif args.action == \"recv\": receive(args.ip, args.port, args.workers) else: parser.print_help()", "try: if self._transfer_type == 'S': res = self._send_s(fpath, fsize) elif", "headers to sender\") else: self.param_set = True # Simplified Functions", "f.tell() + self._packet_size size = (self._packet_size - max(0, end_size -", "tok self._sender_filename = fn self._sender_filesizes = fs # Start receiving", "message.\"\"\" global FROMTERMINAL if FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1) exit(-1) exit(-1)", "0 r_bytes = round(data/pow(2, 10*p), precision) return f\"{r_bytes}{scale[p]}\" def __init__(self,", "self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\") return res def send_param_set(self):", "transmissions. 
\"\"\" try: printalert(\"Waiting for sender\") self.socket.listen(1) self._sender_socket, addrport =", "<gh_stars>0 \"\"\" Flyter Tool for transferring files on the same", "pass except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted", "Simplified Functions def send(ip_address, port, filepath): \"\"\" Send file to", "except: return printerror(\"Error while waiting for sender\") try: len_sender_hn =", "tok, tr_type, len_wp, wp]) except: return printerror(\"Error building headers\") try:", "random import randint from secrets import token_bytes from shutil import", "= pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1) ==", "self._sending_file = False return True def _send_m(self, filepath, file_sizes): \"\"\"", "with open(fpath, 'bw') as f: while self._recving_file and f.writable() and", "'--port', type=int, required=True, help=\"Target receiver's TCP port number\") send_parser.add_argument('-f', '--file',", "display(self): \"\"\"Display the current progress.\"\"\" if self.stopped: return d_value =", "self.socket.settimeout(60) except: printerror('Error initializing sockets') self.param_set = False def __del__(self):", "KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False printerror(\"User aborted operation\") self._recving_file =", "= False return printerror(f\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() self._sending_file", "fpath.split(sep)[-1] fsize = stat(fpath).st_size fsizes = [fsize//num_w for w in", "out\") except Exception: return printerror(\"Error while sending headers to receiver\")", "file\") else: self._recving_file = False return True def _recv_m(self): \"\"\"", "self._recv_s() elif self.transfer_type == 'M': res = self._recv_m() else: res", "# Progress bar thread self._progress_bar = ProgressBar(fs_all, 35) self._progress_bar.start() def", "a 
string.\"\"\" return byteseq.decode() # Utility Classes class ProgressBar: \"\"\"", "import token_bytes from shutil import get_terminal_size from socket import \\", "self.socket.close() if self.__dict__.get('workers'): for w in self.workers: w.close() def _recv_s(self):", "to be used. main_port : int The main TCP port", "Receiver's parameter settings. Used to set Sender's parameter settings used", "Software without restriction, including without limitation the rights to use,", "prog prog_bar = '█'*prog + '▌'*extra spaces = ' '*(self.length", "out\") except: self._progress_bar.stop() return printerror(f\"Error while sending file\") else: self._sending_file", "sending headers to sender\") else: self.param_set = True # Simplified", "from threading import Thread from time import time from warnings", "= False return printerror(\"User aborted operation\") self._sending_file = False return", "res = hex(res)[2:] res = '0'*(len(res) % 2) + res", "string into a byte sequence.\"\"\" return string.encode() def unpack_str(byteseq): \"\"\"Unpack", "# ACK else: self._sender_socket.send(b'\\x06') # NAK return printalert(\"Rejected file transfer\")", "NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS", "return printerror(\"Error with sockets\") except: self._progress_bar.stop() self._recving_file = False return", "except timeout: return printerror(\"No sender available\") except: return printerror(\"Error while", "hereby granted, free of charge, to any person obtaining a", "timed out\") except: return printerror(\"Error during handshake\") try: hn =", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "Parameters ---------- ip_address : str The target receiver's IP address.", "out\") except: return printerror(\"Error during handshake\") try: hn = pack_str(gethostname())", "return htonl(res) def pack_str(string): \"\"\"Pack a string into a byte", "return True def send_file(self, filepath): \"\"\" Send a file. 
Parameters", "permission error\") while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._recving_file", "Thread from time import time from warnings import warn from", "contents of the temp file open(wpath, 'bw').close() # Delete the", "from secrets import token_bytes from shutil import get_terminal_size from socket", "host's sent files are stored. \"\"\" app_dirname = dirname(__file__) appfiles_dirname", "access file due to permission \" \"error\") except timeout: self._progress_bar.stop()", "gethostname, \\ AF_INET, SOCK_STREAM from threading import Thread from time", "ACK except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Sending file was", "isn't available anymore\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while", "= False return True def _send_m(self, filepath, file_sizes): \"\"\" Send", "parameter settings. Used to set Sender's parameter settings used during", "= join(storage_dirname, hostname) if not exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname else:", "FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE", "printerror(\"Error while sending headers to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \"", "\"\"\"Convert byte sequence to 32 - but integer for unpacking.\"\"\"", "host port to listen on. 
workers : :obj:`int`, optional The", "fs -= len(packet) except AssertionError: self._progress_bar.stop() return printerror(\"Receiver rejected packet\")", "= b''.join(wp) headers = b''.join([len_hn, hn, tok, tr_type, len_wp, wp])", "= False self._workers_active = 0 self._progress_bar = None try: self.socket", "pack_str(gethostname()) len_hn = int_to_bytes_s(len(hn)) tok = self.token tr_type = pack_str(self.transfer_type)", "threadfunc(worker_num, fpath, start, end): self._workers_active += 1 try: with socket(AF_INET,", "except timeout: return printerror(\"Operation timed out\") except: return printerror(\"Error during", "if send_tok == self._sender_token: sender_socket.send(b'\\x06') # ACK else: sender_socket.send(b'\\x15') #", "self._workers_active += 1 try: with socket(AF_INET, SOCK_STREAM) as sock: sock.connect(", "printerror(\"Error while starting to send file\") while self._workers_active: try: pass", "max_value : int, float The upper limit of the progress", "sent. \"\"\" if not self.param_set: return printerror(\"Not yet set with", "access file due to permission error\") except timeout: self._progress_bar.stop() return", "printerror(f\"Error while sending file\") finally: self._workers_active -= 1 num_workers =", "\"\"\" from argparse import ArgumentParser from base64 import b64encode from", "self._progress_bar = ProgressBar(fsize, 40) self._progress_bar.start() def progress_thread(): try: # Wait", "file was unsuccessful\") else: self._sender_socket.send(b'\\x06') # ACK # Wait for", "the progress bar.\"\"\" self.stopped = True def add_progress(self, value): \"\"\"", "= [int_to_bytes_s(port) for port in self.worker_ports] wp = b''.join(wp) headers", "\"\"\" Send a file with a single worker. 
Parameters ----------", "= False return printerror(\"Receiving file was unsuccessful\") else: self._sender_socket.send(b'\\x06') #", "FROMTERMINAL = False # Utility Functions def random_port(host): \"\"\"Return a", "0 self._progress_bar = None try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60)", "listen on\") recv_parser.add_argument('-w', '--workers', type=int, default=1, help=\"TCP port to listen", "if num_workers > 1 else [] if self.workers: for w", "b'\\x06' # ACK except KeyboardInterrupt: return printerror(\"User aborted operation\") except", "'?' clear_line = \" \"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\" \"Progress: \"", "\"\"\"Display the current progress.\"\"\" if self.stopped: return d_value = self.current_val", "False printerror(\"User aborted operation\") self._recving_file = False try: # Build", "def __del__(self): if isinstance(self.__dict__.get('socket'), socket): self.socket.close() if self.__dict__.get('workers'): for w", "Software is furnished to do so, subject to the following", "send file\") while self._workers_active: try: pass except KeyboardInterrupt: self._progress_bar.stop() self._sending_file", "parameters\") try: self._recving_file = True path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename", "due to permissions\") except error: return printerror(\"Error with sockets\") except:", "Send file to receiver on the same network. Parameters ----------", "target receiver's IP address. port : int The target receiver's", "receiver to accept file\") assert self.socket.recv(1) == b'\\x06' # ACK", "included in all copies or substantial portions of the Software.", "float Added progress value. \"\"\" if self.stopped: return self.current_val +=", "main TCP port. filepath : str The path to the", "on. 
workers : :obj:`int`, optional The number of workers to", "port : int The receiver's host port to listen on.", "printerror(\"Receiver rejected handshake\") except timeout: return printerror('Operation timed out') except:", "[fsize//num_w for w in range(num_w)] fsizes[-1] += fsize - sum(fsizes)", "= None self._sender_filesizes = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file =", "to set Sender's parameter settings used during data transmissions. \"\"\"", ") with open(path, 'bw') as output: for w in range(num_workers):", "parameter settings used during data transmissions. \"\"\" try: printalert(\"Waiting for", "self._sending_file = False return printerror(\"User aborted operation\") self._sending_file = False", "Some features are not be compatible with the version of", "num_workers == 1 else 'M' self.worker_ports = [ random_port(self.host_ip) for", "files on the same network using raw sockets. Doesn't use", "Pack and send Receiver's parameter settings. Used to set Sender's", "headers = b''.join([len_hn, hn, tok, tr_type, len_wp, wp]) except: return", "[ socket(AF_INET, SOCK_STREAM) for w in range(num_workers) ] if num_workers", "from shutil import get_terminal_size from socket import \\ socket, error,", "except: return printerror(\"Error while starting to send file\") while self._workers_active:", "res = bytes.hex(byteseq) res = int(res, 16) return htons(res) def", "== 'M': res = self._send_m(fpath, fsizes) assert self.socket.recv(1) == b'\\x06'", "1 else 'M' self.worker_ports = [ random_port(self.host_ip) for w in", "self._workers_active -= 1 num_workers = len(self._worker_ports) self._sending_file = True try:", "= False return printerror(f\"Error while sending file\") finally: self._workers_active -=", "socket import \\ socket, error, timeout, \\ ntohs, ntohl, htons,", "from sender on the same network. Parameters ---------- host_ip_address :", "sequence into a string.\"\"\" return byteseq.decode() # Utility Classes class", "to use. 
\"\"\" receiver = FlyterReciever(host_ip_address, port, workers) receiver.send_param_set() receiver.recv_file()", "False return printerror(\"Receiving file was unsuccessful\") else: self._sender_socket.send(b'\\x06') # ACK", "range(num_workers): Thread( target=threadfunc, args=( w, filepath, size, size + file_sizes[w]", "else [] self._sender_socket = None self._sender_hostname = None self._sender_token =", "length of the progress bar. \"\"\" @staticmethod def byte_rescale(data, precision=1):", "sender.send_file(filepath) def receive(host_ip_address, port, workers=1): \"\"\" Receive a file from", "to the file to be sent. \"\"\" sender = FlyterSender(ip_address,", ": str The Host IP address to be used. main_port", "to permissions\") except error: return printerror(\"Error with sockets\") except: self._sender_socket.send(b'\\x15')", "dest=\"action\", help=\"The action to be performed\" ) send_parser = subparsers.add_parser(\"send\")", "try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except: printerror('Error initializing sockets')", "send_tok == self._sender_token: sender_socket.send(b'\\x06') # ACK else: sender_socket.send(b'\\x15') # NAK", "end)) packet = f.read(size) if not packet: break sock.send(packet) assert", "answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}).", "= subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True, help=\"Target receiver's", "def __init__(self, recver_ip, main_port): self.recver_ip = recver_ip self.main_port = main_port", "10*p), precision) return f\"{r_bytes}{scale[p]}\" def __init__(self, max_value, length=50): self.max_value =", ": :obj:`int`, optional The length of the progress bar. 
\"\"\"", "as output: for w in range(num_workers): wpath = join( FlyterReciever.storage_dir(self._sender_hostname),", "self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\") return res def recv_param_set(self): \"\"\" Receive", "headers to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now sending", "= 0 for w in range(num_workers): Thread( target=threadfunc, args=( w,", "if answer.lower() == 'y': self._sender_socket.send(b'\\x06') # ACK else: self._sender_socket.send(b'\\x06') #", "res = int(res, 16) return htonl(res) def pack_str(string): \"\"\"Pack a", "len_hn = int_to_bytes_s(len(hn)) tok = self.token tr_type = pack_str(self.transfer_type) len_wp", "# ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except KeyboardInterrupt: self._progress_bar.stop() self._sending_file", "permission error\") except: return printerror(\"Error while starting to send file\")", "= False printerror(\"User aborted operation\") self._recving_file = False try: #", "= tok self._sender_filename = fn self._sender_filesizes = fs # Start", "while self._recving_file and fs: packet = self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs", "self._recv_m() else: res = None except: self._progress_bar.stop() self._recving_file = False", "send(args.ip, args.port, args.file) elif args.action == \"recv\": receive(args.ip, args.port, args.workers)", "in range(num_workers): Thread( target=threadfunc, args=( w, filepath, size, size +", "while self._recving_file and f.writable() and fs: packet = sender_socket.recv(self._packet_size) f.write(packet)", "to the file to be sent. file_sizes : list(int) The", "= b''.join([tok, len_fn, fn, len_fs, fs]) except: return printerror(\"Error while", "from argparse import ArgumentParser from base64 import b64encode from datetime", "upper limit of the progress bar. length : :obj:`int`, optional", "\"\"\" Handles Flyter file sending processes. 
Note: Sends to FlyterReceiver", "yet set with parameters\") try: self._recving_file = True path =", "parameters\") # Headers try: tok = self._sender_socket.recv(6) b64_tok = b64encode(tok).decode()", "open(fpath, 'br') as f: f.seek(start) while self._sending_file and fs: end_size", "to 16 - but integer for unpacking.\"\"\" res = bytes.hex(byteseq)", "int(res, 16) return htons(res) def int_to_bytes_l(integer): \"\"\"Convert 32 - but", "Count new progress. Parameter --------- value : int, float Added", "'M' self.worker_ports = [ random_port(self.host_ip) for w in range(num_workers) ]", "to be sent. \"\"\" if not self.param_set: return printerror(\"Not yet", "Flyter Tool for transferring files on the same network using", "self._recving_file = True path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs", "mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname, 'Received Files') if not exists(storage_dirname): mkdir(storage_dirname)", "during handshake\") try: len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token", "notice and this permission notice shall be included in all", "parser.add_subparsers( dest=\"action\", help=\"The action to be performed\" ) send_parser =", "required=True, help=\"Host IP address\") recv_parser.add_argument('-p', '--port', type=int, required=True, help=\"TCP port", "exist, creates it first. 
Parameters ---------- hostname : str The", "def threadfunc(worker_num, fpath, start, end): self._workers_active += 1 try: with", "fs: packet = f.read(self._packet_size) if not packet: break self.socket.send(packet) assert", "printerror(f\"Sending file was unsuccessful\") else: # Wait for progress bar", "= d_value/d_time if d_time else float('inf') eta_s = round((d_max_value -", "IP address\") recv_parser.add_argument('-p', '--port', type=int, required=True, help=\"TCP port to listen", "\"error\") except timeout: self._progress_bar.stop() self._sending_file = False return printerror(\"Operation timed", "self.length = length self.rate = None self.start_time = None self.start_value", "try: self.socket.connect((self.recver_ip, self.main_port)) except error: return printerror(\"Can't connect to \"", "rejected\") except timeout: return printerror(\"Operation timed out\") except Exception: return", "end_size = f.tell() + self._packet_size size = (self._packet_size - max(0,", "if not packet: break self.socket.send(packet) assert self.socket.recv(1) == b'\\x06' #", "from os.path import dirname, exists, join from random import randint", "worker. Parameters ---------- filepath : str The filepath to the", "ACK except AssertionError: return printerror(\"Receiver rejected handshake\") except timeout: return", "with sockets\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while saving", "exist\") self._sending_file = True try: fs = file_size with open(filepath,", "_send_m(self, filepath, file_sizes): \"\"\" Send a file with multiple workers.", "and/or sell copies of the Software, and to permit persons", "fs -= len(packet) except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return", "file_sizes[w] ), ).start() size += file_sizes[w] except FileNotFoundError: return printerror(\"Couldn't", "bytes for packing.\"\"\" res = ntohs(integer) res = hex(res)[2:] res", "Parameter --------- value : int, float Added progress value. 
\"\"\"", "available TCP port.\"\"\" while True: port = randint(10_000, 65536) with", "bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\") return", "self.param_set = False def __del__(self): if isinstance(self.__dict__.get('socket'), socket): self.socket.close() if", "self._recving_file = False return printerror(\"Error while receiving file\") finally: self._workers_active", "receiver. main_port : int The main TCP port of the", "rate by using multiple workers. Parameters ---------- filepath : str", "The length of the progress bar. \"\"\" @staticmethod def byte_rescale(data,", "sockets\") except: self._progress_bar.stop() self._recving_file = False return printerror(\"Error while receiving", "self._progress_bar.stop() return printerror(\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop() return printerror(\"Couldn't", "ACK except KeyboardInterrupt: return printerror(\"User aborted operation\") except AssertionError: return", "f\"is now sending file ({ProgressBar.byte_rescale(fsize)})\") # Progress bar thread self._progress_bar", "except FileNotFoundError: self._progress_bar.stop() return printerror(\"Downloading file has been deleted\") except", "return printerror(\"Downloading file has been deleted\") except PermissionError: self._progress_bar.stop() return", "return printerror(\"No sender available\") except: return printerror(\"Error while waiting for", "\"\"\"Unpack a byte sequence into a string.\"\"\" return byteseq.decode() #", "pass except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False printerror(\"User aborted operation\")", "\"Accept? 
(y/n) \") if answer.lower() == 'y': self._sender_socket.send(b'\\x06') # ACK", "while waiting for sender\") try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn =", "# ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except AssertionError: self._progress_bar.stop() return", "p = int(log(data, 2)/10) if data else 0 r_bytes =", "except timeout: return printerror('Operation timed out') except: return printerror(\"Error during", "True while packet: packet = temp.read(self._packet_size) output.write(packet) # Clear the", "main_port self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type = 'S' if num_workers ==", "return printerror(f\"Error while sending file\") finally: self._workers_active -= 1 num_workers", "\" f\"ETA: {eta}\", end=\"\\r\") # Flyter Classes class FlyterSender: \"\"\"", "copy of this software and associated documentation files (the \"Software\"),", "sockets') self.param_set = False def __del__(self): if isinstance(self.socket, socket): self.socket.close()", "return printerror(\"Receiver rejected\") except timeout: return printerror(\"Operation timed out\") except", "+= file_sizes[w] except FileNotFoundError: return printerror(\"Couldn't access file\") except PermissionError:", "+ res return bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert byte sequence to", "for packing.\"\"\" res = ntohs(integer) res = hex(res)[2:] res =", "{fname}\") return res def recv_param_set(self): \"\"\" Receive and unpack Receiver's", "extra)) rate = d_value/d_time if d_time else float('inf') eta_s =", "'bw').close() # Delete the temp files for w in range(num_workers):", "sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn) self._sender_socket.send(b'\\x06') # ACK except", "range(len_wp)] self.socket.send(b'\\x06') # ACK except error: return printerror(\"Error getting connected", "OTHER DEALINGS IN THE SOFTWARE. 
\"\"\" from argparse import ArgumentParser", "error\") except timeout: self._progress_bar.stop() return printerror(\"Operation timed out\") except: self._progress_bar.stop()", "= [bytes_to_int_l(self._sender_socket.recv(4)) for s in range(len_fs)] fs_all = sum(fs) answer", "socket(AF_INET, SOCK_STREAM) as sock: try: sock.bind((host, port)) except error: continue", "False return printerror(\"Operation timed out\") except: self._progress_bar.stop() self._sending_file = False", "def printerror(errormsg): \"\"\"Print an error message.\"\"\" global FROMTERMINAL if FROMTERMINAL:", "= join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15') #", "'KB', 'MB', 'GB', 'TB', 'PB'] p = int(log(data, 2)/10) if", "the contents of the temp file open(wpath, 'bw').close() # Delete", "the receiver. main_port : int The main TCP port of", "Wait until receiving file while not self._recving_file: pass # Display", "path = join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs = self._sender_filesizes[0] with", "file with multiple workers. Speeds up transmission rate by using", "IP address\") send_parser.add_argument('-p', '--port', type=int, required=True, help=\"Target receiver's TCP port", "self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully sent: {fname}\") return res def recv_param_set(self):", "for sender\") self.socket.listen(1) self._sender_socket, addrport = self.socket.accept() except timeout: return", "_send_s(self, filepath, file_size): \"\"\" Send a file with a single", "= b''.join(fs) len_fs = int_to_bytes_s(num_w) headers = b''.join([tok, len_fn, fn,", "ip_address : str The target receiver's IP address. 
port :", "in range(num_w)] fsizes[-1] += fsize - sum(fsizes) fn = pack_str(fname)", "except error: self._progress_bar.stop() self._recving_file = False return printerror(\"Error with sockets\")", "restriction, including without limitation the rights to use, copy, modify,", "If storage directory doesn't exist, creates it first. Parameters ----------", "self._progress_bar = None try: self.socket = socket(AF_INET, SOCK_STREAM) self.socket.settimeout(60) except:", "packet = sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) sender_socket.send(b'\\x06') #", "\\ gethostname, \\ AF_INET, SOCK_STREAM from threading import Thread from", "fs: packet = self._sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs -= len(packet) self._sender_socket.send(b'\\x06')", "about a specific sub-command.\" ) subparsers = parser.add_subparsers( dest=\"action\", help=\"The", "f.read(self._packet_size) if not packet: break self.socket.send(packet) assert self.socket.recv(1) == b'\\x06'", "htonl, \\ gethostname, \\ AF_INET, SOCK_STREAM from threading import Thread", "class FlyterReciever: \"\"\" Handles Flyter file receiving processes. Note: Receives", "the path of the storage dir for received files. 
If", "recver_socket = self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr = recver_socket.accept() send_tok =", "wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15')", "OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "= max(1, len(self._worker_ports)) fpath = filepath.replace(altsep, sep) fname = fpath.split(sep)[-1]", "self._progress_bar.display() print(f\"\\nSuccessfully received: {self._sender_filename}\") return res def send_param_set(self): \"\"\" Pack", "- but integer to bytes for packing.\"\"\" res = ntohl(integer)", "# ACK fs = end - start with open(fpath, 'br')", "try: printalert(\"Waiting for sender\") self.socket.listen(1) self._sender_socket, addrport = self.socket.accept() except", "self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr = recver_socket.accept() send_tok = sender_socket.recv(6) if", "OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "receiver's parameters\") # Headers try: tok = self._sender_socket.recv(6) b64_tok =", "self._workers_active = 0 self._progress_bar = None try: self.socket = socket(AF_INET,", "self._sender_socket, addrport = self.socket.accept() except timeout: return printerror(\"No sender available\")", "= self.current_val - self.start_value d_max_value = self.max_value - self.start_value d_time", "self._workers_active -= 1 num_workers = len(self.workers) self._recving_file = True try:", "except KeyboardInterrupt: self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted operation\")", "def progress_thread(): try: # Wait until sending file while not", "self._progress_bar.display() except: return printerror(\"Error with progress thread\") Thread(target=progress_thread).start() self._sender_token =", "'0'*(len(res) % 2) + res return bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert", "be compatible with 
the version of your ' 'python interpreter')", "self.socket.send(headers) print(\"Waiting for receiver to accept file\") assert self.socket.recv(1) ==", "try: tok = self.token num_w = max(1, len(self._worker_ports)) fpath =", ": int The amount of workers to be used during", "Send a file with multiple workers. Speeds up transmission rate", "self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while receiving headers\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()}", "processes. Note: Sends to FlyterReceiver instances. Parameterss ---------- recver_ip :", "- self.start_value d_time = time() - self.start_time per = d_value/d_max_value", "IP address to be used. main_port : int The main", "for progress bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully sent:", "log from os import altsep, sep, \\ mkdir, stat, unlink", "exist\") def threadfunc(worker_num, fpath, start, end): self._workers_active += 1 try:", "if args.action == \"send\": send(args.ip, args.port, args.file) elif args.action ==", "PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR", "extra = self.length*round(per) > prog prog_bar = '█'*prog + '▌'*extra", "= \"CryptoNyxz\" __license__ = \"\"\" MIT License Copyright (c) 2021", "1024 def __init__(self, recver_ip, main_port): self.recver_ip = recver_ip self.main_port =", "self._sender_filesizes = fs # Start receiving try: if self.transfer_type ==", "False return printerror(\"Couldn't access file due to permission \" \"error\")", "False self._workers_active = 0 self._progress_bar = None try: self.socket =", "return printerror(\"Couldn't access file\") except PermissionError: return printerror(\"Couldn't access file", "for transferring files on the same network using raw sockets.", "value def display(self): \"\"\"Display the current progress.\"\"\" if self.stopped: return", "the Software, and to permit persons to whom the Software", "ntohl(integer) res = hex(res)[2:] res = '0'*(len(res) % 2) +", "\" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? (y/n) \") if answer.lower() == 'y':", "printalert(\"Waiting for sender\") self.socket.listen(1) self._sender_socket, addrport = self.socket.accept() except timeout:", "and associated documentation files (the \"Software\"), to deal in the", "FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1) exit(-1) exit(-1) exit(-1) else: warn(errormsg) def", "AF_INET, SOCK_STREAM from threading import Thread from time import time", "ACK # Wait for progress bar while not self._progress_bar.done: pass", "Wait for progress bar while not self._progress_bar.done: pass self._progress_bar.display() print(f\"\\nSuccessfully", "receiving file\") finally: self._workers_active -= 1 num_workers = len(self.workers) self._recving_file", "Thread(target=progress_thread).start() # Start sending res = None try: if self._transfer_type", "the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "recv_param_set(self): \"\"\" Receive and unpack Receiver's parameter settings. 
Used to", "not exists(filepath): printerror(\"File doesn't exist\") def threadfunc(worker_num, fpath, start, end):", "from time import time from warnings import warn from sys", "self._sending_file = False return printerror(\"Couldn't access file due to permission", "len_fn = bytes_to_int_s(self._sender_socket.recv(2)) fn = unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs", "Host IP address to be used. main_port : int The", "features are not be compatible with the version of your", "f\"{w}_{self._sender_filename}\" ) with open(wpath, 'br') as temp: packet = True", "receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now sending file ({ProgressBar.byte_rescale(fsize)})\")", "set with parameters\") try: self._recving_file = True path = join(", "saving file\") else: return True def recv_file(self): \"\"\"Receive a file.\"\"\"", "= unpack_str(sender_hn) self._sender_socket.send(b'\\x06') # ACK except timeout: return printerror(\"Operation timed", "not self.param_set: return printerror(\"Sender not yet set with parameters\") def", "return printalert(\"Rejected file transfer\") except error: return printerror(\"Sender isn't available", "initializing sockets') self.param_set = False def __del__(self): if isinstance(self.socket, socket):", "with open(fpath, 'br') as f: f.seek(start) while self._sending_file and fs:", "\") if answer.lower() == 'y': self._sender_socket.send(b'\\x06') # ACK else: self._sender_socket.send(b'\\x06')", "{gethostname()}-{b64encode(self.token).decode()} ] \" f\"is now receiving file ({ProgressBar.byte_rescale(fs_all)})\") # Progress", "printerror(\"Downloading file has been deleted\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't", "The main TCP port to be used. 
num_workers : int", "from receiver\") else: self.param_set = True class FlyterReciever: \"\"\" Handles", "self._sending_file = False return printerror(\"Operation timed out\") except: self._progress_bar.stop() self._sending_file", "file due to permission error\") while self._workers_active: try: pass except", "to be sent. file_sizes : list(int) The sizes of the", "if self.__dict__.get('workers'): for w in self.workers: w.close() def _recv_s(self): \"\"\"Receive", "Flyter file receiving processes. Note: Receives from FlyterSender instances. Parameters", "this software and associated documentation files (the \"Software\"), to deal", "= None self._sender_filename = None self._sender_filesizes = None self._packet_size =", "res = ntohs(integer) res = hex(res)[2:] res = '0'*(len(res) %", "prog = int(self.length*per) extra = self.length*round(per) > prog prog_bar =", "receiver's parameters\") if not exists(filepath): printerror(\"File doesn't exist\") def threadfunc(worker_num,", "'TB', 'PB'] p = int(log(data, 2)/10) if data else 0", "False return printerror(\"User aborted operation\") except AssertionError: self._progress_bar.stop() self._sending_file =", "1 else [] if self.workers: for w in range(num_workers): self.workers[w].bind((self.host_ip,", "all copies or substantial portions of the Software. THE SOFTWARE", "False return True def _recv_m(self): \"\"\" Receive a file with", "else '?' 
clear_line = \" \"*(get_terminal_size().columns - 1) print(f\"{clear_line}\\r\" \"Progress:", "subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i', '--ip', required=True, help=\"Target receiver's IP", "f\"ETA: {eta}\", end=\"\\r\") # Flyter Classes class FlyterSender: \"\"\" Handles", "received while not self._progress_bar.done: self._progress_bar.display() except: return printerror(\"Error with progress", "f\"{w}_{self._sender_filename}\" ) unlink(wpath) except PermissionError: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Couldn't", "open(filepath, 'br') as f: while self._sending_file and fs: packet =", "of workers to use. \"\"\" receiver = FlyterReciever(host_ip_address, port, workers)", "if FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1) exit(-1) exit(-1) exit(-1) else: warn(errormsg)", "self._transfer_type == 'M': res = self._send_m(fpath, fsizes) assert self.socket.recv(1) ==", "self.host_ip = host_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self.transfer_type", "following conditions: The above copyright notice and this permission notice", "({ProgressBar.byte_rescale(fs_all)})\") # Progress bar thread self._progress_bar = ProgressBar(fs_all, 35) self._progress_bar.start()", "= '0'*(len(res) % 2) + res return bytes.fromhex(res) def bytes_to_int_s(byteseq):", "printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() self._sending_file = False return", "receiver's main TCP port. filepath : str The path to", "interpreter') FROMTERMINAL = False # Utility Functions def random_port(host): \"\"\"Return", "sum(fsizes) fn = pack_str(fname) len_fn = int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s)", "as f: f.seek(start) while self._sending_file and fs: end_size = f.tell()", "port to listen on. 
workers : :obj:`int`, optional The number", "= ntohl(integer) res = hex(res)[2:] res = '0'*(len(res) % 2)", "self._progress_bar = ProgressBar(fs_all, 35) self._progress_bar.start() def progress_thread(): try: # Wait", "doesn't exist, creates it first. Parameters ---------- hostname : str", "self.socket.send(b'\\x06') # ACK except error: return printerror(\"Error getting connected with", "received: {self._sender_filename}\") return res def send_param_set(self): \"\"\" Pack and send", "return bytes.fromhex(res) def bytes_to_int_l(byteseq): \"\"\"Convert byte sequence to 32 -", "fs_all = sum(fs) answer = input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to send:", "self._sending_file = False return printerror(f\"Receiver rejected packet\") except FileNotFoundError: self._progress_bar.stop()", ": str The filepath of the file to be sent.", "- end)) packet = f.read(size) if not packet: break sock.send(packet)", ") Thread( target=threadfunc, args=(w, wpath), ).start() except FileNotFoundError: return printerror(\"Couldn't", "Progress bar thread self._progress_bar = ProgressBar(fs_all, 35) self._progress_bar.start() def progress_thread():", "False self.start_time = time() self.start_value = self.current_val def stop(self): \"\"\"Stop", "bytes_to_int_s(byteseq): \"\"\"Convert byte sequence to 16 - but integer for", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER", "amount of workers to be used during transmission. 
\"\"\" @staticmethod", "= False return printerror(\"Couldn't access file\") except PermissionError: self._progress_bar.stop() self._sending_file", "except timeout: return printerror(\"Operation timed out\") except Exception: return printerror(\"Error", "# Delete the temp files for w in range(num_workers): wpath", "file_sizes : list(int) The sizes of the split-up file to", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS", "if already finished.\"\"\" return self.current_val >= self.max_value or self.stopped def", "byte sequence into a string.\"\"\" return byteseq.decode() # Utility Classes", "40) self._progress_bar.start() def progress_thread(): try: # Wait until sending file", "self.socket.close() def _send_s(self, filepath, file_size): \"\"\" Send a file with", "self.main_port)) except error: return printerror(\"Can't connect to \" f\"{self.recver_ip}:{self.main_port}\") try:", "try: len_sender_hn = bytes_to_int_s(self._sender_socket.recv(2)) sender_hn = self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn)", "= self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr = recver_socket.accept() send_tok = sender_socket.recv(6)", "except: self._progress_bar.stop() return printerror(f\"Error while sending file\") else: self._sending_file =", "from base64 import b64encode from datetime import timedelta from math", "secrets import token_bytes from shutil import get_terminal_size from socket import", "self._send_s(fpath, fsize) elif self._transfer_type == 'M': res = self._send_m(fpath, fsizes)", "num_workers): self.host_ip = host_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6)", "to permission error\") except: return printerror(\"Error while starting to send", "precision=1): scale = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] p", "eta = timedelta(seconds=eta_s) if eta_s is not None else '?'", "copies or substantial portions of the Software. 
THE SOFTWARE IS", "OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED", "None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False self._workers_active = 0", "where that host's sent files are stored. \"\"\" app_dirname =", "None self._sender_filesizes = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._recving_file = False", "\"\"\" Receive and unpack Receiver's parameter settings. Used to set", "= ProgressBar(fs_all, 35) self._progress_bar.start() def progress_thread(): try: # Wait until", "for packing.\"\"\" res = ntohl(integer) res = hex(res)[2:] res =", "except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Error while sending file\")", "if self.workers: for w in range(num_workers): self.workers[w].bind((self.host_ip, self.worker_ports[w])) self.workers[w].settimeout(60) except:", "res = self._send_m(fpath, fsizes) assert self.socket.recv(1) == b'\\x06' # ACK", "AssertionError: self._progress_bar.stop() self._sending_file = False return printerror(f\"Receiver rejected packet\") except", "NAK fs = self._sender_filesizes[worker_num] with open(fpath, 'bw') as f: while", "to {self._recver_hostname}-{b64_tok}:\" f\" [ {fname} ]\") self.socket.send(headers) print(\"Waiting for receiver", "return host_storage_dirname else: return storage_dirname DEFAULT_PACKET_SIZE = 512 def __init__(self,", "'__main__': parser = ArgumentParser( prog=\"Flyter\", epilog=\"See '<command> --help' to read", "self._progress_bar.stop() self._sending_file = False return printerror(\"User aborted operation\") self._sending_file =", "= unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1)) len_wp =", "\"\"\"Return if already finished.\"\"\" return self.current_val >= self.max_value or self.stopped", "global FROMTERMINAL if FROMTERMINAL: print(f'\\n[x] {errormsg}') exit(-1) exit(-1) exit(-1) exit(-1)", "self._workers_active += 1 try: 
recver_socket = self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr", "None eta = timedelta(seconds=eta_s) if eta_s is not None else", "target=threadfunc, args=( w, filepath, size, size + file_sizes[w] ), ).start()", "f.read(size) if not packet: break sock.send(packet) assert sock.recv(1) == b'\\x06'", "Classes class FlyterSender: \"\"\" Handles Flyter file sending processes. Note:", "progress. Parameter --------- value : int, float Added progress value.", "assert self.socket.recv(1) == b'\\x06' # ACK except KeyboardInterrupt: return printerror(\"User", "self._sender_hostname = None self._sender_token = None self._sender_filename = None self._sender_filesizes", "same network. Parameters ---------- ip_address : str The target receiver's", "'--port', type=int, required=True, help=\"TCP port to listen on\") recv_parser.add_argument('-w', '--workers',", "except error: return printerror(\"Sender isn't available anymore\") except: self._sender_socket.send(b'\\x15') #", "for w in range(len_wp)] self.socket.send(b'\\x06') # ACK except error: return", "self.max_value - self.start_value d_time = time() - self.start_time per =", "Handles Flyter file sending processes. Note: Sends to FlyterReceiver instances.", "sending file ({ProgressBar.byte_rescale(fsize)})\") # Progress bar thread self._progress_bar = ProgressBar(fsize,", "= 0 self._progress_bar = ProgressBar(None) try: self.socket = socket(AF_INET, SOCK_STREAM)", "ACK self._progress_bar.add_progress(len(packet)) fs -= len(packet) except KeyboardInterrupt: self._progress_bar.stop() self._sending_file =", "= input(f\"{self._sender_hostname}-{b64_tok}\" f\" wants to send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). 
\"", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR", "sequence to 32 - but integer for unpacking.\"\"\" res =", "self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname = None self._recver_token", "def random_port(host): \"\"\"Return a random available TCP port.\"\"\" while True:", "self._progress_bar.stop() return printerror(\"Downloading file has been deleted\") except PermissionError: self._progress_bar.stop()", "= join( FlyterReciever.storage_dir(self._sender_hostname), self._sender_filename ) fs = self._sender_filesizes[0] with open(path,", "pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1) == b'\\x06'", "32 - but integer for unpacking.\"\"\" res = bytes.hex(byteseq) res", "OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "str The name of the subdirectory where that host's sent", "print(f\"\\nSuccessfully sent: {fname}\") return res def recv_param_set(self): \"\"\" Receive and", "receiver's host port to listen on. workers : :obj:`int`, optional", "---------- hostname : str The name of the subdirectory where", "sockets\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while saving file\")", "Headers try: tok = self._sender_socket.recv(6) b64_tok = b64encode(tok).decode() len_fn =", "not exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname else: return storage_dirname DEFAULT_PACKET_SIZE =", "= None except: self._progress_bar.stop() self._recving_file = False return printerror(\"Receiving file", "' '*(self.length - (prog + extra)) rate = d_value/d_time if", "in range(num_workers) ] if num_workers > 1 else [] self._sender_socket", "The path to the file to be sent. 
\"\"\" sender", "f: while self._recving_file and f.writable() and fs: packet = sender_socket.recv(self._packet_size)", "NAK return printerror(\"Couldn't save file due to permissions\") except error:", "progress bar. length : :obj:`int`, optional The length of the", "None self._worker_ports = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False", "self._sender_socket = None self._sender_hostname = None self._sender_token = None self._sender_filename", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN", "try: sender_hn = pack_str(gethostname()) len_sender_hn = int_to_bytes_s(len(sender_hn)) self.socket.send(b''.join([len_sender_hn, sender_hn])) assert", "Speeds up transmission rate by using multiple workers. \"\"\" if", "= join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc, args=(w, wpath), ).start()", "= [ random_port(self.host_ip) for w in range(num_workers) ] if num_workers", "return printerror(\"Operation timed out\") except error: self._progress_bar.stop() self._recving_file = False", "str The filepath of the file to be sent. \"\"\"", "except: printerror('Error initializing sockets') self.param_set = False def __del__(self): if", "string.\"\"\" return byteseq.decode() # Utility Classes class ProgressBar: \"\"\" For", "= False return printerror(\"Error while receiving file\") finally: self._workers_active -=", "aborted operation\") except timeout: self._progress_bar.stop() self._recving_file = False return printerror(\"Operation", "alert message.\"\"\" global FROMTERMINAL print(f'[!] 
{alert}') def int_to_bytes_s(integer): \"\"\"Convert 16", "= None self.start_value = None self.stopped = False @property def", "try: if self.transfer_type == 'S': res = self._recv_s() elif self.transfer_type", "return printerror(\"Error during handshake\") try: hn = pack_str(gethostname()) len_hn =", "except error: return printerror(\"Can't connect to \" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn", "send_file(self, filepath): \"\"\" Send a file. Parameters ---------- filepath :", "transmission. \"\"\" @staticmethod def storage_dir(hostname=None): \"\"\" Return the path of", "0 self.length = length self.rate = None self.start_time = None", "def recv_param_set(self): \"\"\" Receive and unpack Receiver's parameter settings. Used", "= unpack_str(self._sender_socket.recv(len_fn)) len_fs = bytes_to_int_s(self._sender_socket.recv(2)) fs = [bytes_to_int_l(self._sender_socket.recv(4)) for s", "len(argv) > 1: FROMTERMINAL = True args = parser.parse_args() if", "] \" f\"is now receiving file ({ProgressBar.byte_rescale(fs_all)})\") # Progress bar", "PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access storage directory\") except error: self._progress_bar.stop()", "b'\\x06' # ACK except: return printerror(\"Error while sending headers to", "65536) with socket(AF_INET, SOCK_STREAM) as sock: try: sock.bind((host, port)) except", "default=1, help=\"TCP port to listen on\") if len(argv) > 1:", "self._recving_file = False return printerror(\"Operation timed out\") except error: self._progress_bar.stop()", "main TCP port to be used. 
num_workers : int The", "not packet: break sock.send(packet) assert sock.recv(1) == b'\\x06' # ACK", "self.socket.recv(6) self._transfer_type = unpack_str(self.socket.recv(1)) len_wp = bytes_to_int_s(self.socket.recv(2)) self._worker_ports = [bytes_to_int_s(self.socket.recv(2))", "None self._transfer_type = None self._worker_ports = None self._packet_size = FlyterSender.DEFAULT_PACKET_SIZE", "from FlyterSender instances. Parameters ---------- host_ip : str The Host", "DEALINGS IN THE SOFTWARE. \"\"\" from argparse import ArgumentParser from", "self._sending_file = True try: size = 0 for w in", "socket(AF_INET, SOCK_STREAM) self.socket.bind((self.host_ip, self.main_port)) self.socket.settimeout(60) self.workers = [ socket(AF_INET, SOCK_STREAM)", "= self._recv_m() else: res = None except: self._progress_bar.stop() self._recving_file =", "and f.writable() and fs: packet = sender_socket.recv(self._packet_size) f.write(packet) self._progress_bar.add_progress(len(packet)) fs", "scale = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] p =", "self.stopped = False @property def done(self): \"\"\"Return if already finished.\"\"\"", "parser.parse_args() if args.action == \"send\": send(args.ip, args.port, args.file) elif args.action", "> 1 else [] self._sender_socket = None self._sender_hostname = None", "help=\"Target receiver's IP address\") send_parser.add_argument('-p', '--port', type=int, required=True, help=\"Target receiver's", "printerror(\"Receiver rejected\") except timeout: return printerror(\"Operation timed out\") except Exception:", "due to permission error\") while self._workers_active: try: pass except KeyboardInterrupt:", "# Simplified Functions def send(ip_address, port, filepath): \"\"\" Send file", "isinstance(self.__dict__.get('socket'), socket): self.socket.close() if self.__dict__.get('workers'): for w in self.workers: w.close()", "used. 
num_workers : int The amount of workers to be", "len_hn = bytes_to_int_s(self.socket.recv(2)) self._recver_hostname = unpack_str(self.socket.recv(len_hn)) self._recver_token = self.socket.recv(6) self._transfer_type", "'▌'*extra spaces = ' '*(self.length - (prog + extra)) rate", "= True while packet: packet = temp.read(self._packet_size) output.write(packet) # Clear", "while preparing headers\") try: b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\"", "def send_param_set(self): \"\"\" Pack and send Receiver's parameter settings. Used", "exit(-1) exit(-1) else: warn(errormsg) def printalert(alert): \"\"\"Print an alert message.\"\"\"", "FlyterSender.DEFAULT_PACKET_SIZE self._sending_file = False self._workers_active = 0 self._progress_bar = None", "= self._sender_socket.recv(len_sender_hn) self._sender_hostname = unpack_str(sender_hn) self._sender_socket.send(b'\\x06') # ACK except timeout:", "be performed\" ) send_parser = subparsers.add_parser(\"send\") recv_parser = subparsers.add_parser(\"recv\") send_parser.add_argument('-i',", ") fs = self._sender_filesizes[0] with open(path, 'bw') as f: while", "-= 1 num_workers = len(self.workers) self._recving_file = True try: for", "not be compatible with the version of your ' 'python", "- 1) print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)})", "), ).start() size += file_sizes[w] except FileNotFoundError: return printerror(\"Couldn't access", "1) print(f\"{clear_line}\\r\" \"Progress: \" f\"|{prog_bar}{spaces}| \" f\"{100*per:.1f}% \" f\"({ProgressBar.byte_rescale(d_value)}) \"", "access file\") except PermissionError: self._progress_bar.stop() return printerror(\"Couldn't access file due", "with parameters\") try: self._recving_file = True path = join( FlyterReciever.storage_dir(self._sender_hostname),", "sequence to 16 - but integer for unpacking.\"\"\" res =", "fsizes) assert 
self.socket.recv(1) == b'\\x06' # ACK except: self._progress_bar.stop() self._sending_file", "1 try: recver_socket = self.workers[worker_num] recver_socket.listen(1) sender_socket, hostaddr = recver_socket.accept()", "out\") except error: self._progress_bar.stop() self._recving_file = False return printerror(\"Error with", "self._sender_socket.send(b'\\x15') # NAK return printerror(\"Couldn't save file due to permissions\")", "recv_file(self): \"\"\"Receive a file.\"\"\" if not self.param_set: return printerror(\"Not yet", "end - start with open(fpath, 'br') as f: f.seek(start) while", "# NAK return printerror(\"Error while saving file\") else: return True", "self._transfer_type == 'S': res = self._send_s(fpath, fsize) elif self._transfer_type ==", "token_bytes from shutil import get_terminal_size from socket import \\ socket,", "exists(host_storage_dirname): mkdir(host_storage_dirname) return host_storage_dirname else: return storage_dirname DEFAULT_PACKET_SIZE = 512", "False return True def send_file(self, filepath): \"\"\" Send a file.", "byteseq.decode() # Utility Classes class ProgressBar: \"\"\" For displaying progress", "= round((d_max_value - d_value)/rate) if rate else \\ None eta", "'S': res = self._send_s(fpath, fsize) elif self._transfer_type == 'M': res", "instances. Parameterss ---------- recver_ip : str The IP address of", "free of charge, to any person obtaining a copy of", ": int The main TCP port of the receiver. 
\"\"\"", "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE", "b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\" [ {fname} ]\")", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "while packet: packet = temp.read(self._packet_size) output.write(packet) # Clear the contents", "Software, and to permit persons to whom the Software is", "'MB', 'GB', 'TB', 'PB'] p = int(log(data, 2)/10) if data", "due to permission error\") except: return printerror(\"Error while starting to", "if isinstance(self.socket, socket): self.socket.close() def _send_s(self, filepath, file_size): \"\"\" Send", "int_to_bytes_s(len(fn)) fs = [int_to_bytes_l(s) for s in fsizes] fs =", "== b'\\x06' # ACK except: self._progress_bar.stop() self._sending_file = False return", "= True # Simplified Functions def send(ip_address, port, filepath): \"\"\"", ": str The path to the file to be sent.", "= 'S' if num_workers == 1 else 'M' self.worker_ports =", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or", "def bytes_to_int_s(byteseq): \"\"\"Convert byte sequence to 16 - but integer", "except: return printerror(\"Error during handshake\") try: hn = pack_str(gethostname()) len_hn", "True try: size = 0 for w in range(num_workers): Thread(", "self.worker_ports = [ random_port(self.host_ip) for w in range(num_workers) ] if", "on the same network. 
Parameters ---------- host_ip_address : str The", "with open(path, 'bw') as output: for w in range(num_workers): wpath", "assert self.socket.recv(1) == b'\\x06' # ACK except: self._progress_bar.stop() self._sending_file =", "in fsizes] fs = b''.join(fs) len_fs = int_to_bytes_s(num_w) headers =", "except: self._progress_bar.stop() return printerror(\"Error receiving file\") else: self._recving_file = False", "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION", "packet = True while packet: packet = temp.read(self._packet_size) output.write(packet) #", "ntohl, htons, htonl, \\ gethostname, \\ AF_INET, SOCK_STREAM from threading", "sock.recv(1) == b'\\x06' # ACK fs = end - start", "printerror(\"Can't connect to \" f\"{self.recver_ip}:{self.main_port}\") try: sender_hn = pack_str(gethostname()) len_sender_hn", "self.socket.send(b''.join([len_sender_hn, sender_hn])) assert self.socket.recv(1) == b'\\x06' # ACK except AssertionError:", "= sender_socket.recv(6) if send_tok == self._sender_token: sender_socket.send(b'\\x06') # ACK else:", "during data transmissions. \"\"\" try: printalert(\"Waiting for sender\") self.socket.listen(1) self._sender_socket,", "tr_type, len_wp, wp]) except: return printerror(\"Error building headers\") try: self._sender_socket.send(headers)", "preparing headers\") try: b64_tok = b64encode(self._recver_token).decode() printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\"", "ACK fs = end - start with open(fpath, 'br') as", "= \"\"\" MIT License Copyright (c) 2021 <NAME> Permission is", "with a single worker. Parameters ---------- filepath : str The", "transmissions. 
\"\"\" try: self.socket.connect((self.recver_ip, self.main_port)) except error: return printerror(\"Can't connect", "anymore\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while receiving headers\")", "Parameters ---------- host_ip_address : str The receiver's host IP address.", "self._progress_bar.stop() return printerror(\"Couldn't access storage directory\") except error: self._progress_bar.stop() return", "'Flyter') if not exists(appfiles_dirname): mkdir(appfiles_dirname) storage_dirname = join(appfiles_dirname, 'Received Files')", "def receive(host_ip_address, port, workers=1): \"\"\" Receive a file from sender", "printerror(\"Error receiving file\") else: self._recving_file = False return True def", "return printerror(\"Couldn't save file due to permissions\") except error: return", "1 try: with socket(AF_INET, SOCK_STREAM) as sock: sock.connect( (self.recver_ip, self._worker_ports[worker_num])", "the subdirectory where that host's sent files are stored. \"\"\"", "address to be used. main_port : int The main TCP", "\"\"\" Handles Flyter file receiving processes. Note: Receives from FlyterSender", "self.transfer_type == 'S': res = self._recv_s() elif self.transfer_type == 'M':", "sock: try: sock.bind((host, port)) except error: continue else: return port", "# Start receiving try: if self.transfer_type == 'S': res =", "of charge, to any person obtaining a copy of this", "try: size = 0 for w in range(num_workers): Thread( target=threadfunc,", "printerror(\"Error with sockets\") except: self._sender_socket.send(b'\\x15') # NAK return printerror(\"Error while", "self._progress_bar.start() def progress_thread(): try: # Wait until receiving file while", "True: port = randint(10_000, 65536) with socket(AF_INET, SOCK_STREAM) as sock:", "global FROMTERMINAL print(f'[!] 
{alert}') def int_to_bytes_s(integer): \"\"\"Convert 16 - bit", "fsizes] fs = b''.join(fs) len_fs = int_to_bytes_s(num_w) headers = b''.join([tok,", "self.param_set = False def __del__(self): if isinstance(self.socket, socket): self.socket.close() def", "printerror(\"Error with sockets\") except: self._progress_bar.stop() self._recving_file = False return printerror(\"Error", "until receiving file while not self._recving_file: pass # Display until", "import warn from sys import argv, exit, version_info if version_info", "The above copyright notice and this permission notice shall be", "printalert(alert): \"\"\"Print an alert message.\"\"\" global FROMTERMINAL print(f'[!] {alert}') def", "on the same network using raw sockets. Doesn't use encryption.", "= recver_ip self.main_port = main_port self.token = <PASSWORD>_<PASSWORD>(6) self._recver_hostname =", "2)/10) if data else 0 r_bytes = round(data/pow(2, 10*p), precision)", "'python interpreter') FROMTERMINAL = False # Utility Functions def random_port(host):", "# ACK except KeyboardInterrupt: self._progress_bar.stop() self._recving_file = False return printerror(\"User", "of your ' 'python interpreter') FROMTERMINAL = False # Utility", "printalert(f\"Sending to {self._recver_hostname}-{b64_tok}:\" f\" [ {fname} ]\") self.socket.send(headers) print(\"Waiting for", "addrport = self.socket.accept() except timeout: return printerror(\"No sender available\") except:", "associated documentation files (the \"Software\"), to deal in the Software", "while sending headers to receiver\") print(f\"[ {gethostname()}-{b64encode(self.token).decode()} ] \" f\"is", "try: for w in range(len(self.worker_ports)): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\"", "in range(len(self.worker_ports)): wpath = join( FlyterReciever.storage_dir(self._sender_hostname), f\"{w}_{self._sender_filename}\" ) Thread( target=threadfunc,", "sock.connect( (self.recver_ip, self._worker_ports[worker_num]) 
) sock.send(self.token) assert sock.recv(1) == b'\\x06' #", "IP address of the receiver. main_port : int The main", "f\" wants to send: {fn} \" f\"({ProgressBar.byte_rescale(fs_all)}). \" \"Accept? (y/n)", "timed out\") except: self._progress_bar.stop() self._sending_file = False return printerror(f\"Error while", "= len(self._worker_ports) self._sending_file = True try: size = 0 for", "0 for w in range(num_workers): Thread( target=threadfunc, args=( w, filepath,", "# Clear the contents of the temp file open(wpath, 'bw').close()" ]
[ "= is_training self.use_labels = use_labels self.vocab_size = vocab_size self.hidden_size =", "to next_input_ids next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size,", "is a problem with the signature check. # Test passes", "passes for TFLEDModel, but not for TFLEDForConditionalGeneration # IMO the", "if self.is_encoder_decoder: model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states,", "both tensors, raise a nice Assertion error.\"\"\" if a is", "attention only attends to `self.attention_window` and one before and one", "tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"] = global_attention_mask", "2.0 (the \"License\"); # you may not use this file", "not close, or a and b arent both tensors, raise", "config.pad_token_id), tf.int8) return { \"input_ids\": input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\": decoder_input_ids,", "= \"{} != {}\".format(a, b) if prefix: msg = prefix", "): if attention_mask is None: attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8)", "but TFLongformerSelfAttention # returns attention of shape [num_attention_heads, encoder_seq_length, self.attention_window", "from `seq_length`. 
Relevant for # the `test_attention_outputs` and `test_hidden_states_output` tests", "has special attentions which are not # compatible in graph", "self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) # Check that output attentions can also", "in self.all_model_classes: inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"] = False config.output_hidden_states =", "use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2,", "= ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config = self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers,", "TFModelTesterMixin, ids_tensor if is_tf_available(): import tensorflow as tf from transformers", ":] attention_mask = inputs_dict[\"attention_mask\"][:1, :] self.batch_size = 1 # first", "{ \"input_ids\": input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, }", "= hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size =", "`encoder_seq_length`, is different from `seq_length`. 
Relevant for # the `test_attention_outputs`", "= model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select random slice", "= False config.output_hidden_states = False model = model_class(config) outputs =", "input_ids and next_input_ids = tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask = tf.concat([attention_mask,", "that outputs are equal for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def", "# first forward pass outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) output,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "= tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return { \"input_ids\": input_ids, \"attention_mask\": attention_mask,", "seq_length], ) def check_encoder_attentions_output(outputs): attentions = [t.numpy() for t in", "input_ids = inputs_dict[\"input_ids\"] input_ids = input_ids[:1, :] attention_mask = inputs_dict[\"attention_mask\"][:1,", "attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4, ): self.parent = parent", "inputs_dict): model = TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"] input_ids = input_ids[:1,", "def test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices", "+ 1] # because its local attention only attends to", "pass!!! 
PVP: # IMO there is a problem with the", "eos_tensor], axis=1) decoder_input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config = self.config_cls(", "= model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len + (2 if self.is_encoder_decoder else 1),", "# change to intended input here input_ids = _long_tensor([512 *", "import LEDConfig, is_tf_available from transformers.testing_utils import require_tf, slow from .test_configuration_common", "list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], ) for model_class in self.all_model_classes: inputs_dict[\"output_attentions\"]", "328, 740, 1140, 12695, 69]]) inputs_dict = prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids)", "ConfigTester from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor if is_tf_available(): import tensorflow", "before and one after self.key_length = self.attention_window + 1 #", "after self.key_length = self.attention_window + 1 # because of padding", "(1, 1024, 768) self.assertEqual(output.shape, expected_shape) # change to expected output", "self.seq_length % self.attention_window) % self.attention_window ) def prepare_config_and_inputs_for_common(self): input_ids =", "isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x = model.get_output_layer_with_bias() assert x is None name", "= 1e-4 @slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self): model =", "1024, 768) self.assertEqual(output.shape, expected_shape) # change to expected output here", "input_ids = ids_tensor([self.batch_size, self.seq_length - 1], self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id]", "because of padding `encoder_seq_length`, is different from `seq_length`. 
Relevant for", "is None name = model.get_prefix_bias_name() assert name is None def", "self.model_tester = TFLEDModelTester(self) self.config_tester = ConfigTester(self, config_class=LEDConfig) def test_config(self): self.config_tester.run_common_tests()", ".test_configuration_common import ConfigTester from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor if is_tf_available():", "# select random slice random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice =", "tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1) input_ids = tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids", "bos_token_id self.attention_window = attention_window # `ModelTesterMixin.test_attention_outputs` is expecting attention tensors", "= prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output = model(**inputs_dict)[0] expected_shape = (1,", "self.assertEqual(output.shape, expected_shape) # change to expected output here expected_slice =", "under the License. import unittest from transformers import LEDConfig, is_tf_available", "use this file except in compliance with the License. #", "next_tokens], axis=-1) next_attention_mask = tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past = model(next_input_ids,", "= tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8) # append to next input_ids", "= parent self.batch_size = batch_size self.seq_length = seq_length self.is_training =", "of padding `encoder_seq_length`, is different from `seq_length`. 
Relevant for #", "attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select", ":] < num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], ) config.return_dict = True seq_length", "self.eos_token_id = eos_token_id self.pad_token_id = pad_token_id self.bos_token_id = bos_token_id self.attention_window", "change to expected output here expected_slice = tf.convert_to_tensor( [[2.3050, 2.8279,", "reserved. # # Licensed under the Apache License, Version 2.0", "1140, 12695, 69]]) decoder_input_ids = _long_tensor([128 * [0, 31414, 232,", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "<NAME> and The HuggingFace Inc. team. All rights reserved. #", "= tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1) input_ids = tf.concat([input_ids, eos_tensor], axis=1)", "- 1], self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1) input_ids", "to `self.attention_window` and one before and one after self.key_length =", "License. 
# You may obtain a copy of the License", "list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices],", "be changed via the config del inputs_dict[\"output_attentions\"] config.output_attentions = True", "self.config_tester = ConfigTester(self, config_class=LEDConfig) def test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs", "random_slice_idx] output_from_past_slice = output_from_past[:, :, random_slice_idx] # test that outputs", "is fine inputs_dict[\"output_attentions\"] = True config.output_hidden_states = True model =", "[tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"] = global_attention_mask return", "= int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice =", "under the License is distributed on an \"AS IS\" BASIS,", "False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict,", "License for the specific language governing permissions and # limitations", "IMO the reason is that the tensor variable name cannot", "1140, 12695, 69]]) inputs_dict = prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output =", "(TFLEDForConditionalGeneration,) if is_tf_available() else () is_encoder_decoder = True test_pruning =", "model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) out_len = len(outputs)", "is None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return { \"input_ids\":", ") tf.debugging.assert_near(output[:, :3, :3], expected_slice, 
atol=TOLERANCE) def test_inference_with_head(self): model =", "input_ids, decoder_input_ids) output = model(**inputs_dict)[0] expected_shape = (1, 1024, model.config.vocab_size)", "name = model.get_prefix_bias_name() assert name is None def test_attention_outputs(self): config,", "# Copyright <NAME>, <NAME>, <NAME> and The HuggingFace Inc. team.", "if is_tf_available() else () is_encoder_decoder = True test_pruning = False", "def __init__( self, parent, batch_size=13, seq_length=7, is_training=True, use_labels=False, vocab_size=99, hidden_size=32,", "which poses a BIG restrictions pass def _assert_tensors_equal(a, b, atol=1e-12,", "# [num_attention_heads, encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention # returns attention of", "328, 740, 1140, 12695, 69]]) decoder_input_ids = _long_tensor([128 * [0,", "outputs = model(self._prepare_for_class(inputs_dict, model_class)) out_len = len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs)", "num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], ) config.return_dict = True seq_length = self.model_tester.seq_length", "is_tf_available from transformers.testing_utils import require_tf, slow from .test_configuration_common import ConfigTester", "tf.int8) # append to next input_ids and next_input_ids = tf.concat([input_ids,", "- self.seq_length % self.attention_window) % self.attention_window ) def prepare_config_and_inputs_for_common(self): input_ids", "signature check. 
# Test passes for TFLEDModel, but not for", "output = model(**inputs_dict)[0] expected_shape = (1, 1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape)", "\"gelu\" def __init__( self, parent, batch_size=13, seq_length=7, is_training=True, use_labels=False, vocab_size=99,", "= ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8)", "output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice = output_from_past[:, :, random_slice_idx] # test", "= tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config", "self.intermediate_size = intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings", "import unittest from transformers import LEDConfig, is_tf_available from transformers.testing_utils import", "inputs_dict[\"output_attentions\"] config.output_attentions = True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict,", "self.attention_window = attention_window # `ModelTesterMixin.test_attention_outputs` is expecting attention tensors to", "**self.config_updates, ) inputs_dict = prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask = tf.concat(", "1e-4 @slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led", "and The HuggingFace Inc. team. All rights reserved. 
# #", "eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4, ): self.parent = parent self.batch_size =", "test_pruning = False def setUp(self): self.model_tester = TFLEDModelTester(self) self.config_tester =", "import require_tf, slow from .test_configuration_common import ConfigTester from .test_modeling_tf_common import", "in outputs.encoder_attentions] global_attentions = [t.numpy() for t in outputs.encoder_global_attentions] self.assertEqual(len(attentions),", "Exception: msg = \"{} != {}\".format(a, b) if prefix: msg", ":3], expected_slice, atol=TOLERANCE) def test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change", "[t.numpy() for t in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual(", "in compliance with the License. # You may obtain a", "slow from .test_configuration_common import ConfigTester from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor", "TFLEDForConditionalGeneration, TFLEDModel @require_tf class TFLEDModelTester: config_cls = LEDConfig config_updates =", "software # distributed under the License is distributed on an", "output attentions can also be changed via the config del", "msg = prefix + \": \" + msg raise AssertionError(msg)", "inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model = model_class(config)", "return config, inputs_dict def check_decoder_model_past_large_inputs(self, config, inputs_dict): model = TFLEDModel(config=config).get_decoder()", "3), config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8) # append", "seq_length self.is_training = is_training self.use_labels = use_labels self.vocab_size = vocab_size", "of shape [num_attention_heads, encoder_seq_length, 
self.attention_window + 1] # because its", "inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"] = False config.output_hidden_states = False model", "= [t.numpy() for t in outputs.encoder_attentions] global_attentions = [t.numpy() for", "self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model = model_class(config) assert isinstance(model.get_input_embeddings(),", "from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor if is_tf_available(): import tensorflow as", "append to next input_ids and next_input_ids = tf.concat([input_ids, next_tokens], axis=-1)", "also be changed via the config del inputs_dict[\"output_attentions\"] config.output_attentions =", "= tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"] =", "# append to next input_ids and next_input_ids = tf.concat([input_ids, next_tokens],", "= True test_pruning = False def setUp(self): self.model_tester = TFLEDModelTester(self)", "expected_slice = tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089], [5.8739, -2.4238, 11.2902], [-3.2139,", "expected_shape) # change to expected output here expected_slice = tf.convert_to_tensor(", "[[33.6507, 6.4572, 16.8089], [5.8739, -2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]], )", "self.batch_size = 1 # first forward pass outputs = model(input_ids,", "if prefix: msg = prefix + \": \" + msg", "and one before and one after self.key_length = self.attention_window +", "past_key_values = outputs.to_tuple() past_key_values = past_key_values[1] # create hypothetical next", "[self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], ) for model_class in self.all_model_classes: inputs_dict[\"output_attentions\"] =", "return True try: if tf.debugging.assert_near(a, b, atol=atol): return True raise", "atol=atol): return True raise except Exception: msg = \"{} !=", "# Check attention is always last and 
order is fine", "return { \"input_ids\": input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask,", "Relevant for # the `test_attention_outputs` and `test_hidden_states_output` tests self.encoder_seq_length =", "config.pad_token_id), tf.int8) if decoder_attention_mask is None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id),", "# longformer has special attentions which are not # compatible", "attention_window=self.attention_window, **self.config_updates, ) inputs_dict = prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask =", "False) check_decoder_attentions_output(outputs) # Check that output attentions can also be", "self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self): # longformer has special", "self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], ) for model_class in self.all_model_classes:", "prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output = model(**inputs_dict)[0] expected_shape = (1, 1024,", "decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, } @require_tf class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes =", "= inputs_dict[\"input_ids\"] input_ids = input_ids[:1, :] attention_mask = inputs_dict[\"attention_mask\"][:1, :]", "= model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len + (2 if", "<NAME>, <NAME> and The HuggingFace Inc. team. 
All rights reserved.", "\"\"\"If tensors not close, or a and b arent both", "one after self.key_length = self.attention_window + 1 # because of", "# Check that output attentions can also be changed via", "self.all_model_classes: model = model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x = model.get_output_layer_with_bias()", "nice Assertion error.\"\"\" if a is None and b is", "0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]], ) tf.debugging.assert_near(output[:, :3,", "problem with the signature check. # Test passes for TFLEDModel,", "only attends to `self.attention_window` and one before and one after", "# returns attention of shape [num_attention_heads, encoder_seq_length, self.attention_window + 1]", "[num_attention_heads, encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention # returns attention of shape", "( self.seq_length + (self.attention_window - self.seq_length % self.attention_window) % self.attention_window", "model_class)) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) # Check attention is always last", "# TODO(JPLU, PVP) this test should pass!!! 
PVP: # IMO", "output = model(**inputs_dict)[0] expected_shape = (1, 1024, 768) self.assertEqual(output.shape, expected_shape)", "config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model =", "= True config.output_hidden_states = True model = model_class(config) outputs =", "test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices =", "inputs_dict = prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask = tf.concat( [tf.zeros_like(input_ids)[:, :-1],", "# create hypothetical next token and extent to next_input_ids next_tokens", "self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length], ) self.assertListEqual(", "True raise except Exception: msg = \"{} != {}\".format(a, b)", "a and b arent both tensors, raise a nice Assertion", "= use_labels self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers =", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "extent to next_input_ids next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask =", "() is_encoder_decoder = True test_pruning = False def setUp(self): self.model_tester", "prefix=\"\"): \"\"\"If tensors not close, or a and b arent", "= inputs_dict[\"attention_mask\"][:1, :] self.batch_size = 1 # first forward pass", "tests self.encoder_seq_length = ( self.seq_length + (self.attention_window - self.seq_length %", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "= attention_window # `ModelTesterMixin.test_attention_outputs` is expecting attention tensors to be", "(TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available() else () all_generative_model_classes = (TFLEDForConditionalGeneration,) if", "\"input_ids\": input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, } @require_tf", "to in writing, software # distributed under the License is", "LEDConfig, is_tf_available from transformers.testing_utils import require_tf, slow from .test_configuration_common import", "inputs_dict[\"input_ids\"] input_ids = input_ids[:1, :] attention_mask = inputs_dict[\"attention_mask\"][:1, :] self.batch_size", "= {} hidden_act = \"gelu\" def __init__( self, parent, batch_size=13,", "self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length], )", "arent both tensors, raise a nice Assertion error.\"\"\" if a", "and one after self.key_length = self.attention_window + 1 # because", "next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8) # append to next", "# See the License for the specific language governing permissions", "tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None, :] <", "self.parent = parent self.batch_size = batch_size self.seq_length = seq_length self.is_training", "= model(input_ids, attention_mask=attention_mask, use_cache=True) output, past_key_values = outputs.to_tuple() past_key_values =", 
"next_attention_mask = tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past", "None name = model.get_prefix_bias_name() assert name is None def test_attention_outputs(self):", "# because of padding `encoder_seq_length`, is different from `seq_length`. Relevant", "or agreed to in writing, software # distributed under the", "TFLEDModel) if is_tf_available() else () all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available()", "attentions = [t.numpy() for t in outputs.encoder_attentions] global_attentions = [t.numpy()", "= output_from_past[:, :, random_slice_idx] # test that outputs are equal", "= [t.numpy() for t in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers)", "required by applicable law or agreed to in writing, software", "if tf.debugging.assert_near(a, b, atol=atol): return True raise except Exception: msg", "tensors to be of size # [num_attention_heads, encoder_seq_length, encoder_key_length], but", "model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select random slice random_slice_idx", "= input_ids[:1, :] attention_mask = inputs_dict[\"attention_mask\"][:1, :] self.batch_size = 1", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "next_input_ids = tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask = tf.concat([attention_mask, next_attn_mask], axis=-1)", "all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else () is_encoder_decoder = True", "ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8) #", "with the License. 
# You may obtain a copy of", "= eos_token_id self.pad_token_id = pad_token_id self.bos_token_id = bos_token_id self.attention_window =", "assert name is None def test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()", "69]]) decoder_input_ids = _long_tensor([128 * [0, 31414, 232, 328, 740,", "tensors, raise a nice Assertion error.\"\"\" if a is None", "config, inputs_dict): model = TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"] input_ids =", "TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"] input_ids = input_ids[:1, :] attention_mask =", "= model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) # Check attention is", "self.seq_length], self.vocab_size) config = self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads,", "model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) # Check that output attentions can", "raise AssertionError(msg) def _long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE = 1e-4", "= tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586,", "\"decoder_attention_mask\": decoder_attention_mask, } @require_tf class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration,", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "= model(**inputs_dict)[0] expected_shape = (1, 1024, 768) self.assertEqual(output.shape, expected_shape) #", "inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None,", "# IMO the reason is that the tensor variable name", "to intended input here input_ids = _long_tensor([512 * [0, 31414,", "is_training self.use_labels = use_labels self.vocab_size = vocab_size self.hidden_size = hidden_size", "assert x is None name = model.get_prefix_bias_name() assert name is", "distributed under the License is distributed on an \"AS IS\"", "\"{} != {}\".format(a, b) if prefix: msg = prefix +", "outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) # Check attention", "atol=1e-12, prefix=\"\"): \"\"\"If tensors not close, or a and b", "decoder_input_ids = _long_tensor([128 * [0, 31414, 232, 328, 740, 1140,", "transformers import LEDConfig, is_tf_available from transformers.testing_utils import require_tf, slow from", "decoder_attention_mask is None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return {", "can also be changed via the config del inputs_dict[\"output_attentions\"] config.output_attentions", ":3, :3], expected_slice, atol=TOLERANCE) def test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") #", "del inputs_dict[\"output_attentions\"] config.output_attentions = True model = model_class(config) outputs =", "is that the tensor variable name cannot be changed #", "return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE = 1e-4 @slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase):", "def 
test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to intended input", "1 # because of padding `encoder_seq_length`, is different from `seq_length`.", "hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.eos_token_id = eos_token_id", "express or implied. # See the License for the specific", "= vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads =", "slice random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx]", "= False def setUp(self): self.model_tester = TFLEDModelTester(self) self.config_tester = ConfigTester(self,", "except in compliance with the License. # You may obtain", "model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) #", "= 1 # first forward pass outputs = model(input_ids, attention_mask=attention_mask,", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "but not for TFLEDForConditionalGeneration # IMO the reason is that", "tf from transformers import TFLEDForConditionalGeneration, TFLEDModel @require_tf class TFLEDModelTester: config_cls", "config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8) # append to", "# Test passes for TFLEDModel, but not for TFLEDForConditionalGeneration #", "self.seq_length - 1], self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1)", "None def test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() 
inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"])", "test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model", "decoder_input_ids) output = model(**inputs_dict)[0] expected_shape = (1, 1024, model.config.vocab_size) self.assertEqual(output.shape,", "writing, software # distributed under the License is distributed on", "else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self): #", "check_decoder_model_past_large_inputs(self, config, inputs_dict): model = TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"] input_ids", "= num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.hidden_dropout_prob =", "you may not use this file except in compliance with", "should pass!!! 
PVP: # IMO there is a problem with", "which are not # compatible in graph mode pass @slow", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "tf.int8) if decoder_attention_mask is None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8)", "@require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change", "attentions which are not # compatible in graph mode pass", "True config.output_hidden_states = True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict,", "is_tf_available() else () all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else ()", "forward pass outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) output, past_key_values =", "tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict( config, input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None,", "intended input here input_ids = _long_tensor([512 * [0, 31414, 232,", "parent self.batch_size = batch_size self.seq_length = seq_length self.is_training = is_training", "except Exception: msg = \"{} != {}\".format(a, b) if prefix:", "-1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"] = global_attention_mask return config, inputs_dict def", "\"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, } @require_tf class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes", "b, atol=1e-12, prefix=\"\"): \"\"\"If tensors not close, or a and", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "self.attention_window + 1 # because of padding `encoder_seq_length`, is different", "decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id,", "parent, batch_size=13, seq_length=7, is_training=True, use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37,", ") for model_class in self.all_model_classes: inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"] =", "this test should pass!!! PVP: # IMO there is a", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "def _assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"): \"\"\"If tensors not close, or", "attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if decoder_attention_mask is None: decoder_attention_mask", "[num_attention_heads, encoder_seq_length, self.attention_window + 1] # because its local attention", "setUp(self): self.model_tester = TFLEDModelTester(self) self.config_tester = ConfigTester(self, config_class=LEDConfig) def test_config(self):", "coding=utf-8 # Copyright <NAME>, <NAME>, <NAME> and The HuggingFace Inc.", "max_position_embeddings self.eos_token_id = eos_token_id self.pad_token_id = pad_token_id self.bos_token_id = bos_token_id", "attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, ) inputs_dict", "self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def 
test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in", "seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], ) for model_class", "TODO(JPLU, PVP) this test should pass!!! PVP: # IMO there", "0.4586, -2.2043]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE) def test_inference_with_head(self):", "-2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice,", "governing permissions and # limitations under the License. import unittest", "[0, 31414, 232, 328, 740, 1140, 12695, 69]]) decoder_input_ids =", "12695, 69]]) decoder_input_ids = _long_tensor([128 * [0, 31414, 232, 328,", "is None: return True try: if tf.debugging.assert_near(a, b, atol=atol): return", "dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, )", "or a and b arent both tensors, raise a nice", "# `ModelTesterMixin.test_attention_outputs` is expecting attention tensors to be of size", "input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ): if attention_mask is None: attention_mask", "PVP: # IMO there is a problem with the signature", "= (TFLEDForConditionalGeneration,) if is_tf_available() else () is_encoder_decoder = True test_pruning", "model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) out_len = len(outputs) self.assertEqual(config.output_hidden_states, False)", "model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) 
self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs)", "compatible in graph mode pass @slow def test_saved_model_with_hidden_states_output(self): # TODO(JPLU,", "vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob,", "= len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model = model_class(config)", "outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len + (2 if self.is_encoder_decoder else", "are equal for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict( config,", "= TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"] input_ids = input_ids[:1, :] attention_mask", "self.attention_window + 1] # because its local attention only attends", "create hypothetical next token and extent to next_input_ids next_tokens =", "attention_mask is None: attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if decoder_attention_mask", "unittest from transformers import LEDConfig, is_tf_available from transformers.testing_utils import require_tf,", "is_training=True, use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20,", "special attentions which are not # compatible in graph mode", "config_updates = {} hidden_act = \"gelu\" def __init__( self, parent,", "ConfigTester(self, config_class=LEDConfig) def 
test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common()", "tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089], [5.8739, -2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]],", "{} hidden_act = \"gelu\" def __init__( self, parent, batch_size=13, seq_length=7,", "self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads,", "check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self): # longformer has special attentions which", "class TFLEDModelTester: config_cls = LEDConfig config_updates = {} hidden_act =", "to next input_ids and next_input_ids = tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask", "input here input_ids = _long_tensor([512 * [0, 31414, 232, 328,", ") inputs_dict = prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask = tf.concat( [tf.zeros_like(input_ids)[:,", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "that the tensor variable name cannot be changed # from", "the License is distributed on an \"AS IS\" BASIS, #", "attention_mask, \"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, } @require_tf class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase):", "inputs_dict[\"global_attention_mask\"], ) config.return_dict = True seq_length = self.model_tester.seq_length encoder_seq_length =", "= outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length], )", "= True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states,", "next_attn_mask], axis=-1) output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens, attention_mask=next_attention_mask,", "batch_size self.seq_length = seq_length self.is_training = is_training self.use_labels = use_labels", "num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4, ):", "@slow def test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP) this test should pass!!!", "and b arent both tensors, raise a nice Assertion error.\"\"\"", "error.\"\"\" if a is None and b is None: return", "prefix + \": \" + msg raise AssertionError(msg) def _long_tensor(tok_lst):", "in graph mode pass @slow def test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP)", "None: attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if decoder_attention_mask is None:", "config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] = 
tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2", "import TFLEDForConditionalGeneration, TFLEDModel @require_tf class TFLEDModelTester: config_cls = LEDConfig config_updates", "input_ids = tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)", "= \"gelu\" def __init__( self, parent, batch_size=13, seq_length=7, is_training=True, use_labels=False,", "= num_attention_heads self.intermediate_size = intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob =", "self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1) input_ids = tf.concat([input_ids,", "self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class", "decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length],", "and next_input_ids = tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask = tf.concat([attention_mask, next_attn_mask],", "order is fine inputs_dict[\"output_attentions\"] = True config.output_hidden_states = True model", "+ msg raise AssertionError(msg) def _long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE", "= self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: model = model_class(config) assert", "not for TFLEDForConditionalGeneration # IMO the reason is that the", "1] # because its local attention only attends to `self.attention_window`", "a is None and b is None: return True try:", "self.key_length = self.attention_window + 1 # 
because of padding `encoder_seq_length`,", "= model.get_prefix_bias_name() assert name is None def test_attention_outputs(self): config, inputs_dict", "inputs_dict = prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output = model(**inputs_dict)[0] expected_shape =", "pass outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) output, past_key_values = outputs.to_tuple()", "msg raise AssertionError(msg) def _long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE =", "be of size # [num_attention_heads, encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention #", "self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions),", "all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available() else () all_generative_model_classes =", "def check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads,", "output_from_past_slice = output_from_past[:, :, random_slice_idx] # test that outputs are", "1], self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1) input_ids =", "TFLEDModel, but not for TFLEDForConditionalGeneration # IMO the reason is", "the config del inputs_dict[\"output_attentions\"] config.output_attentions = True model = model_class(config)", "law or agreed to in writing, software # distributed under", "pass def _assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"): \"\"\"If tensors not close,", "test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP) this test should pass!!! 
PVP: #", "decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, ) inputs_dict = prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask", "not # compatible in graph mode pass @slow def test_saved_model_with_hidden_states_output(self):", "t in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads,", "= ( self.seq_length + (self.attention_window - self.seq_length % self.attention_window) %", "raise except Exception: msg = \"{} != {}\".format(a, b) if", "model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x = model.get_output_layer_with_bias() assert x is", "= model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) # Check that output", "\": \" + msg raise AssertionError(msg) def _long_tensor(tok_lst): return tf.constant(tok_lst,", "x = model.get_output_layer_with_bias() assert x is None name = model.get_prefix_bias_name()", "expected_slice, atol=TOLERANCE) def test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to", ":, random_slice_idx] # test that outputs are equal for slice", "attention_mask = inputs_dict[\"attention_mask\"][:1, :] self.batch_size = 1 # first forward", "b) if prefix: msg = prefix + \": \" +", "69]]) inputs_dict = prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output = model(**inputs_dict)[0] expected_shape", "+ (self.attention_window - self.seq_length % self.attention_window) % self.attention_window ) def", "The HuggingFace Inc. team. All rights reserved. 
# # Licensed", "output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice = output_from_past[:, :, random_slice_idx]", "PVP) this test should pass!!! PVP: # IMO there is", "expected output here expected_slice = tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089], [5.8739,", "always last and order is fine inputs_dict[\"output_attentions\"] = True config.output_hidden_states", "and extent to next_input_ids next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask", "self.batch_size), 1) input_ids = tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids = ids_tensor([self.batch_size,", "# change to expected output here expected_slice = tf.convert_to_tensor( [[2.3050,", "model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) # Check that output attentions", "input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, } @require_tf class", "attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.eos_token_id = eos_token_id self.pad_token_id = pad_token_id", "self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length], ) def check_encoder_attentions_output(outputs):", "require_tf, slow from .test_configuration_common import ConfigTester from .test_modeling_tf_common import TFModelTesterMixin,", "1, inputs_dict[\"global_attention_mask\"], ) config.return_dict = True seq_length = self.model_tester.seq_length encoder_seq_length", "may obtain a copy of the License at # #", ":-1], tf.ones_like(input_ids)[:, -1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"] = global_attention_mask return config,", "@require_tf class TFLEDModelTest(TFModelTesterMixin, 
unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available()", "self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self): config,", "= False model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) out_len", "random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice", "b arent both tensors, raise a nice Assertion error.\"\"\" if", "model.config.vocab_size) self.assertEqual(output.shape, expected_shape) # change to expected output here expected_slice", "past_key_values = past_key_values[1] # create hypothetical next token and extent", "= hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.eos_token_id =", "self.vocab_size) config = self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads,", "prepare_led_inputs_dict( config, input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ): if attention_mask is", "def test_saved_model_with_attentions_output(self): # longformer has special attentions which are not", "expected output here expected_slice = tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531], [-1.8457,", "to expected output here expected_slice = tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089],", "def test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to intended input", "IS\" BASIS, # 
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "encoder_seq_length, self.attention_window + 1] # because its local attention only", "def check_decoder_model_past_large_inputs(self, config, inputs_dict): model = TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"]", "inputs_dict[\"use_cache\"] = False config.output_hidden_states = False model = model_class(config) outputs", "Copyright <NAME>, <NAME>, <NAME> and The HuggingFace Inc. team. All", "dtype=tf.int32) TOLERANCE = 1e-4 @slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self):", "raise a nice Assertion error.\"\"\" if a is None and", "[self.model_tester.num_attention_heads, seq_length, seq_length], ) def check_encoder_attentions_output(outputs): attentions = [t.numpy() for", "inputs_dict[\"attention_mask\"][:1, :] self.batch_size = 1 # first forward pass outputs", "_long_tensor([128 * [0, 31414, 232, 328, 740, 1140, 12695, 69]])", "may not use this file except in compliance with the", "eos_token_id self.pad_token_id = pad_token_id self.bos_token_id = bos_token_id self.attention_window = attention_window", "team. All rights reserved. 
# # Licensed under the Apache", "config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()", "mode pass @slow def test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP) this test", "tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask = tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past =", "232, 328, 740, 1140, 12695, 69]]) decoder_input_ids = _long_tensor([128 *", "encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention # returns attention of shape [num_attention_heads,", "def prepare_led_inputs_dict( config, input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ): if attention_mask", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "model_class in self.all_model_classes: inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"] = False config.output_hidden_states", ") inputs_dict[\"global_attention_mask\"] = global_attention_mask return config, inputs_dict def check_decoder_model_past_large_inputs(self, config,", "this file except in compliance with the License. 
# You", "shape [num_attention_heads, encoder_seq_length, self.attention_window + 1] # because its local", "None: return True try: if tf.debugging.assert_near(a, b, atol=atol): return True", "outputs.encoder_attentions] global_attentions = [t.numpy() for t in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers)", "prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size, self.seq_length - 1], self.vocab_size) eos_tensor =", "TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to intended", "config_cls = LEDConfig config_updates = {} hidden_act = \"gelu\" def", "seq_length=7, is_training=True, use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1,", "IMO there is a problem with the signature check. 
#", "+ (2 if self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs)", "`self.attention_window` and one before and one after self.key_length = self.attention_window", "output_from_past[:, :, random_slice_idx] # test that outputs are equal for", "} @require_tf class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel) if", "# test that outputs are equal for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "output here expected_slice = tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455,", "None and b is None: return True try: if tf.debugging.assert_near(a,", "is None and b is None: return True try: if", "model_class)) self.assertEqual(out_len + (2 if self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states,", "# # Licensed under the Apache License, Version 2.0 (the", "model = TFLEDModel(config=config).get_decoder() input_ids = inputs_dict[\"input_ids\"] input_ids = input_ids[:1, :]", "because its local attention only attends to `self.attention_window` and one", "atol=TOLERANCE) def test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to intended", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "inputs_dict[\"output_attentions\"] = True config.output_hidden_states = True model = model_class(config) outputs", "transformers import TFLEDForConditionalGeneration, TFLEDModel @require_tf class TFLEDModelTester: config_cls = LEDConfig", "= tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask = tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past", "= ConfigTester(self, config_class=LEDConfig) def test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs =", "None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return { \"input_ids\": input_ids,", "Test passes for TFLEDModel, but not for TFLEDForConditionalGeneration # IMO", "change to intended input here input_ids = _long_tensor([512 * [0,", "3), 2), tf.int8) # append to next input_ids and next_input_ids", "740, 1140, 12695, 69]]) decoder_input_ids = _long_tensor([128 * [0, 31414,", "tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens,", "def prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size, self.seq_length - 1], self.vocab_size) eos_tensor", "= past_key_values[1] # create hypothetical next token and extent to", "vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1,", "tf.int8) return { \"input_ids\": input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\":", "import TFModelTesterMixin, ids_tensor if is_tf_available(): import tensorflow as tf from", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "* 
self.batch_size), 1) input_ids = tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids =", "input_ids = _long_tensor([512 * [0, 31414, 232, 328, 740, 1140,", "axis=1) decoder_input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config = self.config_cls( vocab_size=self.vocab_size,", "tensor variable name cannot be changed # from decoder_input_ids ->", "b, atol=atol): return True raise except Exception: msg = \"{}", "batch_size=13, seq_length=7, is_training=True, use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1,", "outputs.to_tuple() past_key_values = past_key_values[1] # create hypothetical next token and", "model(**inputs_dict)[0] expected_shape = (1, 1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape) # change", "in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length,", "self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob", "is None: attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if decoder_attention_mask is", "# from decoder_input_ids -> input_ids, which poses a BIG restrictions", "AssertionError(msg) def _long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE = 1e-4 @slow", "@slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led #", "1) input_ids = tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids = ids_tensor([self.batch_size, self.seq_length],", "rights 
reserved. # # Licensed under the Apache License, Version", "# compatible in graph mode pass @slow def test_saved_model_with_hidden_states_output(self): #", "inputs_dict[\"global_attention_mask\"] = global_attention_mask return config, inputs_dict def check_decoder_model_past_large_inputs(self, config, inputs_dict):", "test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to intended input here", "= attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.eos_token_id = eos_token_id self.pad_token_id =", "= tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089], [5.8739, -2.4238, 11.2902], [-3.2139, -4.3149,", "name is None def test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"]", "True inputs_dict[\"use_cache\"] = False config.output_hidden_states = False model = model_class(config)", "output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict( config, input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ):", "[-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]], ) tf.debugging.assert_near(output[:, :3, :3],", "Check that output attentions can also be changed via the", "and order is fine inputs_dict[\"output_attentions\"] = True config.output_hidden_states = True", "_long_tensor([512 * [0, 31414, 232, 328, 740, 1140, 12695, 69]])", "= self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"] =", "tensors not close, or a and b arent both tensors,", "language governing permissions and # limitations under the License. 
import", "output_from_past.shape[1]) # select random slice random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice", "-3.5661], [-1.0186, 0.4586, -2.2043]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE)", "random slice random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:, -3:,", ") def prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size, self.seq_length - 1], self.vocab_size)", "hypothetical next token and extent to next_input_ids next_tokens = ids_tensor((self.batch_size,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "longformer has special attentions which are not # compatible in", "model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len + (2 if self.is_encoder_decoder", "output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select random", "= _long_tensor([128 * [0, 31414, 232, 328, 740, 1140, 12695,", "def test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP) this test should pass!!! 
PVP:", "attention_window=4, ): self.parent = parent self.batch_size = batch_size self.seq_length =", "= LEDConfig config_updates = {} hidden_act = \"gelu\" def __init__(", "output here expected_slice = tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089], [5.8739, -2.4238,", "pad_token_id=1, bos_token_id=0, attention_window=4, ): self.parent = parent self.batch_size = batch_size", "seq_length = self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions =", "decoder_attention_mask, } @require_tf class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel)", "# change to expected output here expected_slice = tf.convert_to_tensor( [[33.6507,", "and `test_hidden_states_output` tests self.encoder_seq_length = ( self.seq_length + (self.attention_window -", "next token and extent to next_input_ids next_tokens = ids_tensor((self.batch_size, 3),", "= output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice = output_from_past[:, :, random_slice_idx] #", "tf.keras.layers.Layer) x = model.get_output_layer_with_bias() assert x is None name =", "ids_tensor([self.batch_size, self.seq_length - 1], self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size),", "config.output_hidden_states = True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class))", "def setUp(self): self.model_tester = TFLEDModelTester(self) self.config_tester = ConfigTester(self, config_class=LEDConfig) def", "to expected output here expected_slice = tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531],", "or implied. 
# See the License for the specific language", "as tf from transformers import TFLEDForConditionalGeneration, TFLEDModel @require_tf class TFLEDModelTester:", "b is None: return True try: if tf.debugging.assert_near(a, b, atol=atol):", "[[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]], )", "16.8089], [5.8739, -2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]], ) tf.debugging.assert_near(output[:, :3,", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4, ): self.parent = parent self.batch_size", "tensorflow as tf from transformers import TFLEDForConditionalGeneration, TFLEDModel @require_tf class", "attention tensors to be of size # [num_attention_heads, encoder_seq_length, encoder_key_length],", "self.attention_window ) def prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size, self.seq_length - 1],", "the signature check. 
# Test passes for TFLEDModel, but not", "31414, 232, 328, 740, 1140, 12695, 69]]) inputs_dict = prepare_led_inputs_dict(model.config,", "hidden_act = \"gelu\" def __init__( self, parent, batch_size=13, seq_length=7, is_training=True,", "self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select random slice random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1]))", "model_class in self.all_model_classes: model = model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x", "= model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x = model.get_output_layer_with_bias() assert x", "from decoder_input_ids -> input_ids, which poses a BIG restrictions pass", "-0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice,", "num_attention_heads self.intermediate_size = intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob", "cannot be changed # from decoder_input_ids -> input_ids, which poses", "BIG restrictions pass def _assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"): \"\"\"If tensors", "= self.attention_window + 1 # because of padding `encoder_seq_length`, is", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "True seq_length = self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions", ") self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], ) for model_class in", "check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), 
[self.model_tester.num_attention_heads, seq_length,", "fine inputs_dict[\"output_attentions\"] = True config.output_hidden_states = True model = model_class(config)", "config del inputs_dict[\"output_attentions\"] config.output_attentions = True model = model_class(config) outputs", "here expected_slice = tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455, -3.5661],", "the reason is that the tensor variable name cannot be", "tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE) def test_inference_with_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\")", "expected_shape = (1, 1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape) # change to", "@slow def test_saved_model_with_attentions_output(self): # longformer has special attentions which are", "intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4, ): self.parent", "= _long_tensor([512 * [0, 31414, 232, 328, 740, 1140, 12695,", "variable name cannot be changed # from decoder_input_ids -> input_ids,", "False) check_encoder_attentions_output(outputs) # Check attention is always last and order", "num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.hidden_dropout_prob = hidden_dropout_prob", "= TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to intended input here input_ids =", "tf.ones_like(input_ids)[:, -1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"] = global_attention_mask return config, inputs_dict", "attention_window # `ModelTesterMixin.test_attention_outputs` is expecting attention tensors to be of", "= True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len", "\"attention_mask\": attention_mask, 
\"decoder_input_ids\": decoder_input_ids, \"decoder_attention_mask\": decoder_attention_mask, } @require_tf class TFLEDModelTest(TFModelTesterMixin,", "2 inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"],", "self, parent, batch_size=13, seq_length=7, is_training=True, use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5, num_attention_heads=4,", "import ConfigTester from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor if is_tf_available(): import", "= intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings =", "output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice = output_from_past[:, :,", "(the \"License\"); # you may not use this file except", "self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.eos_token_id = eos_token_id self.pad_token_id", "# you may not use this file except in compliance", "ids_tensor if is_tf_available(): import tensorflow as tf from transformers import", "via the config del inputs_dict[\"output_attentions\"] config.output_attentions = True model =", "returns attention of shape [num_attention_heads, encoder_seq_length, self.attention_window + 1] #", "+ \": \" + msg raise AssertionError(msg) def _long_tensor(tok_lst): return", "close, or a and b arent both tensors, raise a", "transformers.testing_utils import require_tf, slow from .test_configuration_common import ConfigTester from .test_modeling_tf_common", "output, past_key_values = outputs.to_tuple() past_key_values = past_key_values[1] # create hypothetical", "for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict( config, 
input_ids, decoder_input_ids,", "model.get_prefix_bias_name() assert name is None def test_attention_outputs(self): config, inputs_dict =", "self.seq_length = seq_length self.is_training = is_training self.use_labels = use_labels self.vocab_size", "tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]],", "TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to intended input here input_ids = _long_tensor([512", "test_saved_model_with_attentions_output(self): # longformer has special attentions which are not #", "model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len + (2", "model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) # Check", "attention of shape [num_attention_heads, encoder_seq_length, self.attention_window + 1] # because", "def test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def", "model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs)", "here input_ids = _long_tensor([512 * [0, 31414, 232, 328, 740,", "# # Unless required by applicable law or agreed to", "changed via the config del inputs_dict[\"output_attentions\"] config.output_attentions = True model", "@require_tf class TFLEDModelTester: config_cls = LEDConfig config_updates = {} hidden_act", "model.get_output_layer_with_bias() assert x is None name = model.get_prefix_bias_name() assert name", "= global_attention_mask return config, inputs_dict def 
check_decoder_model_past_large_inputs(self, config, inputs_dict): model", "% self.attention_window ) def prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size, self.seq_length -", "if is_tf_available(): import tensorflow as tf from transformers import TFLEDForConditionalGeneration,", "global_attention_mask = tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]], axis=-1, ) inputs_dict[\"global_attention_mask\"]", "for TFLEDForConditionalGeneration # IMO the reason is that the tensor", "and # limitations under the License. import unittest from transformers", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "[5.8739, -2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]], ) tf.debugging.assert_near(output[:, :3, :3],", "-2.2043]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE) def test_inference_with_head(self): model", "Version 2.0 (the \"License\"); # you may not use this", "inputs_dict def check_decoder_model_past_large_inputs(self, config, inputs_dict): model = TFLEDModel(config=config).get_decoder() input_ids =", "past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select random slice random_slice_idx = int(ids_tensor((1,),", "config_class=LEDConfig) def test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs)", "self.is_training = is_training self.use_labels = use_labels self.vocab_size = vocab_size self.hidden_size", "input_ids, decoder_input_ids) output = model(**inputs_dict)[0] expected_shape = (1, 1024, 768)", "self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size self.hidden_dropout_prob", "# the 
`test_attention_outputs` and `test_hidden_states_output` tests self.encoder_seq_length = ( self.seq_length", "input_ids, decoder_input_ids) global_attention_mask = tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]], axis=-1,", "config, input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ): if attention_mask is None:", "[t.numpy() for t in outputs.encoder_attentions] global_attentions = [t.numpy() for t", "!= {}\".format(a, b) if prefix: msg = prefix + \":", "graph mode pass @slow def test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP) this", "test that outputs are equal for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3)", "model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) # Check attention is always", "is_encoder_decoder = True test_pruning = False def setUp(self): self.model_tester =", "implied. 
# See the License for the specific language governing", "class TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available() else", "encoder_seq_length, num_global_attn_indices], ) for model_class in self.all_model_classes: inputs_dict[\"output_attentions\"] = True", "one before and one after self.key_length = self.attention_window + 1", "= model(self._prepare_for_class(inputs_dict, model_class)) out_len = len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if", "decoder_input_ids -> input_ids, which poses a BIG restrictions pass def", "under the Apache License, Version 2.0 (the \"License\"); # you", "is expecting attention tensors to be of size # [num_attention_heads,", "test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self): config, inputs_dict =", "model(input_ids, attention_mask=attention_mask, use_cache=True) output, past_key_values = outputs.to_tuple() past_key_values = past_key_values[1]", "tf.constant(tok_lst, dtype=tf.int32) TOLERANCE = 1e-4 @slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def", "axis=-1) next_attention_mask = tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0]", "= model.get_output_layer_with_bias() assert x is None name = model.get_prefix_bias_name() assert", "with the signature check. 
# Test passes for TFLEDModel, but", "size # [num_attention_heads, encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention # returns attention", "11.2902], [-3.2139, -4.3149, 4.2783]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE)", "tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return { \"input_ids\": input_ids, \"attention_mask\": attention_mask, \"decoder_input_ids\":", "All rights reserved. # # Licensed under the Apache License,", "num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4,", "TOLERANCE = 1e-4 @slow @require_tf class TFLEDModelIntegrationTest(unittest.TestCase): def test_inference_no_head(self): model", "for # the `test_attention_outputs` and `test_hidden_states_output` tests self.encoder_seq_length = (", "by applicable law or agreed to in writing, software #", "the `test_attention_outputs` and `test_hidden_states_output` tests self.encoder_seq_length = ( self.seq_length +", "intermediate_size self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings", "self.attention_window) % self.attention_window ) def prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size, self.seq_length", "TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to intended input here input_ids = _long_tensor([512", "= batch_size self.seq_length = seq_length self.is_training = is_training self.use_labels =", ":] self.batch_size = 1 # first forward pass outputs =", "next input_ids and next_input_ids = tf.concat([input_ids, next_tokens], axis=-1) next_attention_mask =", "check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model = model_class(config) outputs = 
model(self._prepare_for_class(inputs_dict, model_class))", "check_encoder_attentions_output(outputs) # Check attention is always last and order is", "else () is_encoder_decoder = True test_pruning = False def setUp(self):", "= model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) #", "model(**inputs_dict)[0] expected_shape = (1, 1024, 768) self.assertEqual(output.shape, expected_shape) # change", "padding `encoder_seq_length`, is different from `seq_length`. Relevant for # the", "seq_length, seq_length], ) def check_encoder_attentions_output(outputs): attentions = [t.numpy() for t", "outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) # Check that", "attention_mask=None, decoder_attention_mask=None, ): if attention_mask is None: attention_mask = tf.cast(tf.math.not_equal(input_ids,", "self.bos_token_id = bos_token_id self.attention_window = attention_window # `ModelTesterMixin.test_attention_outputs` is expecting", "check_encoder_attentions_output(outputs): attentions = [t.numpy() for t in outputs.encoder_attentions] global_attentions =", "= tf.where( tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], ) config.return_dict", "for t in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]),", "-3:, random_slice_idx] output_from_past_slice = output_from_past[:, :, random_slice_idx] # test that", "() all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else () is_encoder_decoder =", "tf.debugging.assert_near(a, b, atol=atol): return True raise except Exception: 
msg =", "# coding=utf-8 # Copyright <NAME>, <NAME>, <NAME> and The HuggingFace", "model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to intended input here input_ids", "is_tf_available(): import tensorflow as tf from transformers import TFLEDForConditionalGeneration, TFLEDModel", "msg = \"{} != {}\".format(a, b) if prefix: msg =", "expected_shape = (1, 1024, 768) self.assertEqual(output.shape, expected_shape) # change to", "2), tf.int8) # append to next input_ids and next_input_ids =", "d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings,", "encoder_key_length], but TFLongformerSelfAttention # returns attention of shape [num_attention_heads, encoder_seq_length,", "hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0, attention_window=4, ): self.parent =", "= (1, 1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape) # change to expected", "name cannot be changed # from decoder_input_ids -> input_ids, which", "if decoder_attention_mask is None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return", "= model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) out_len = len(outputs) self.assertEqual(config.output_hidden_states,", "int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx] output_from_past_slice = output_from_past[:,", "try: if tf.debugging.assert_near(a, b, atol=atol): return True raise except 
Exception:", "expecting attention tensors to be of size # [num_attention_heads, encoder_seq_length,", "= self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions", "6.4572, 16.8089], [5.8739, -2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]], ) tf.debugging.assert_near(output[:,", "bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, ) inputs_dict = prepare_led_inputs_dict(config, input_ids,", "pad_token_id self.bos_token_id = bos_token_id self.attention_window = attention_window # `ModelTesterMixin.test_attention_outputs` is", "# because its local attention only attends to `self.attention_window` and", "(self.attention_window - self.seq_length % self.attention_window) % self.attention_window ) def prepare_config_and_inputs_for_common(self):", "= tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if decoder_attention_mask is None: decoder_attention_mask =", "= True seq_length = self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs):", "self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length], ) def check_encoder_attentions_output(outputs): attentions =", "check. 
# Test passes for TFLEDModel, but not for TFLEDForConditionalGeneration", "outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length], ) def", "attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1]) # select random slice random_slice_idx =", "config, inputs_dict def check_decoder_model_past_large_inputs(self, config, inputs_dict): model = TFLEDModel(config=config).get_decoder() input_ids", "are not # compatible in graph mode pass @slow def", "max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, ) inputs_dict =", "attention_mask=attention_mask, use_cache=True) output, past_key_values = outputs.to_tuple() past_key_values = past_key_values[1] #", "self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length], ) def check_encoder_attentions_output(outputs): attentions", "x is None name = model.get_prefix_bias_name() assert name is None", "self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model = model_class(config) outputs =", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self):", "the tensor variable name cannot be changed # from decoder_input_ids", "Unless required by applicable law or agreed to in writing,", "TFLEDModelTester: config_cls = 
LEDConfig config_updates = {} hidden_act = \"gelu\"", "use_cache=True) output, past_key_values = outputs.to_tuple() past_key_values = past_key_values[1] # create", "tf.concat([input_ids, eos_tensor], axis=1) decoder_input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config =", "unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available() else () all_generative_model_classes", "TFLongformerSelfAttention # returns attention of shape [num_attention_heads, encoder_seq_length, self.attention_window +", "eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] * self.batch_size), 1) input_ids = tf.concat([input_ids, eos_tensor],", "_assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"): \"\"\"If tensors not close, or a", "# IMO there is a problem with the signature check.", "outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length],", "encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id,", "1 # first forward pass outputs = model(input_ids, attention_mask=attention_mask, use_cache=True)", "Assertion error.\"\"\" if a is None and b is None:", "the specific language governing permissions and # limitations under the", "test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to intended input here", "decoder_input_ids) global_attention_mask = 
tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]], axis=-1, )", "applicable law or agreed to in writing, software # distributed", "self.hidden_dropout_prob = hidden_dropout_prob self.attention_probs_dropout_prob = attention_probs_dropout_prob self.max_position_embeddings = max_position_embeddings self.eos_token_id", "config.return_dict = True seq_length = self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length def", "Inc. team. All rights reserved. # # Licensed under the", "first forward pass outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) output, past_key_values", "self.seq_length + (self.attention_window - self.seq_length % self.attention_window) % self.attention_window )", "= model(**inputs_dict)[0] expected_shape = (1, 1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape) #", "= self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual(", "* [0, 31414, 232, 328, 740, 1140, 12695, 69]]) decoder_input_ids", "% self.attention_window) % self.attention_window ) def prepare_config_and_inputs_for_common(self): input_ids = ids_tensor([self.batch_size,", "tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], ) config.return_dict = True", "(1, 1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape) # change to expected output", "= (1, 1024, 768) self.assertEqual(output.shape, expected_shape) # change to expected", "limitations under the License. 
import unittest from transformers import LEDConfig,", "`test_hidden_states_output` tests self.encoder_seq_length = ( self.seq_length + (self.attention_window - self.seq_length", "is_tf_available() else () is_encoder_decoder = True test_pruning = False def", "= prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask = tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:,", "tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if decoder_attention_mask is None: decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids,", "in writing, software # distributed under the License is distributed", "use_labels self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers", "tf.cast(ids_tensor((self.batch_size, 3), 2), tf.int8) # append to next input_ids and", "= tf.concat([attention_mask, next_attn_mask], axis=-1) output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past =", "here expected_slice = tf.convert_to_tensor( [[33.6507, 6.4572, 16.8089], [5.8739, -2.4238, 11.2902],", "random_slice_idx] # test that outputs are equal for slice tf.debugging.assert_near(output_from_past_slice,", "model = model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x = model.get_output_layer_with_bias() assert", ") config.return_dict = True seq_length = self.model_tester.seq_length encoder_seq_length = self.model_tester.encoder_seq_length", "outputs = model(input_ids, attention_mask=attention_mask, use_cache=True) output, past_key_values = outputs.to_tuple() past_key_values", "TFLEDModelTest(TFModelTesterMixin, unittest.TestCase): all_model_classes = (TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available() else ()", "test should pass!!! PVP: # IMO there is a problem", "<NAME>, <NAME>, <NAME> and The HuggingFace Inc. team. 
All rights", "expected_slice = tf.convert_to_tensor( [[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186,", "= outputs.to_tuple() past_key_values = past_key_values[1] # create hypothetical next token", "from .test_configuration_common import ConfigTester from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor if", "decoder_input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config = self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size,", ") def check_encoder_attentions_output(outputs): attentions = [t.numpy() for t in outputs.encoder_attentions]", "LEDConfig config_updates = {} hidden_act = \"gelu\" def __init__( self,", "bos_token_id=0, attention_window=4, ): self.parent = parent self.batch_size = batch_size self.seq_length", "hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size = intermediate_size", "a BIG restrictions pass def _assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"): \"\"\"If", "model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to intended input here input_ids", "ids_tensor([self.batch_size, self.seq_length], self.vocab_size) config = self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers,", "self.use_labels = use_labels self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers", "rtol=1e-3) def prepare_led_inputs_dict( config, input_ids, decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ): if", "reason is that the tensor variable name cannot be changed", "True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False)", "HuggingFace Inc. team. All rights reserved. 
# # Licensed under", "__init__( self, parent, batch_size=13, seq_length=7, is_training=True, use_labels=False, vocab_size=99, hidden_size=32, num_hidden_layers=5,", "= pad_token_id self.bos_token_id = bos_token_id self.attention_window = attention_window # `ModelTesterMixin.test_attention_outputs`", "True) check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self): # longformer has special attentions", "31414, 232, 328, 740, 1140, 12695, 69]]) decoder_input_ids = _long_tensor([128", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "change to expected output here expected_slice = tf.convert_to_tensor( [[33.6507, 6.4572,", "outputs are equal for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict(", "License, Version 2.0 (the \"License\"); # you may not use", "there is a problem with the signature check. # Test", "# You may obtain a copy of the License at", "[self.model_tester.num_attention_heads, encoder_seq_length, seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], )", "vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads", "self.vocab_size = vocab_size self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads", "return True raise except Exception: msg = \"{} != {}\".format(a,", "False model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) out_len =", "= 2 inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices, 1,", "slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict( config, input_ids, decoder_input_ids, attention_mask=None,", 
"list(decoder_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, seq_length, seq_length], ) def check_encoder_attentions_output(outputs): attentions = [t.numpy()", "(2 if self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates,", "_long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE = 1e-4 @slow @require_tf class", "def test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes:", "model_class)) out_len = len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model", "self.hidden_size = hidden_size self.num_hidden_layers = num_hidden_layers self.num_attention_heads = num_attention_heads self.intermediate_size", "decoder_attention_mask = tf.cast(tf.math.not_equal(decoder_input_ids, config.pad_token_id), tf.int8) return { \"input_ids\": input_ids, \"attention_mask\":", "next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3), 2),", "self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length,", "encoder_ffn_dim=self.intermediate_size, 
decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window,", "self.encoder_seq_length = ( self.seq_length + (self.attention_window - self.seq_length % self.attention_window)", "else () all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else () is_encoder_decoder", "config.output_hidden_states = False model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class))", "different from `seq_length`. Relevant for # the `test_attention_outputs` and `test_hidden_states_output`", "pass @slow def test_saved_model_with_hidden_states_output(self): # TODO(JPLU, PVP) this test should", "= (TFLEDForConditionalGeneration, TFLEDModel) if is_tf_available() else () all_generative_model_classes = (TFLEDForConditionalGeneration,)", "= self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size,", "t in outputs.encoder_attentions] global_attentions = [t.numpy() for t in outputs.encoder_global_attentions]", "num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices,", "def check_encoder_attentions_output(outputs): attentions = [t.numpy() for t in outputs.encoder_attentions] global_attentions", "decoder_attention_mask=None, ): if attention_mask is None: attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id),", "the License for the specific language governing 
permissions and #", "config.output_attentions = True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class))", "attention is always last and order is fine inputs_dict[\"output_attentions\"] =", "restrictions pass def _assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"): \"\"\"If tensors not", "equal for slice tf.debugging.assert_near(output_from_past_slice, output_from_no_past_slice, rtol=1e-3) def prepare_led_inputs_dict( config, input_ids,", "Apache License, Version 2.0 (the \"License\"); # you may not", "if is_tf_available() else () all_generative_model_classes = (TFLEDForConditionalGeneration,) if is_tf_available() else", "def _long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32) TOLERANCE = 1e-4 @slow @require_tf", "be changed # from decoder_input_ids -> input_ids, which poses a", "is different from `seq_length`. Relevant for # the `test_attention_outputs` and", "either express or implied. # See the License for the", "last and order is fine inputs_dict[\"output_attentions\"] = True config.output_hidden_states =", "decoder_input_ids) output = model(**inputs_dict)[0] expected_shape = (1, 1024, 768) self.assertEqual(output.shape,", "from transformers import LEDConfig, is_tf_available from transformers.testing_utils import require_tf, slow", "License. 
import unittest from transformers import LEDConfig, is_tf_available from transformers.testing_utils", "self.all_model_classes: inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"] = False config.output_hidden_states = False", "for t in outputs.encoder_attentions] global_attentions = [t.numpy() for t in", "of size # [num_attention_heads, encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention # returns", "self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob,", "hidden_size=32, num_hidden_layers=5, num_attention_heads=4, intermediate_size=37, hidden_dropout_prob=0.1, attention_probs_dropout_prob=0.1, max_position_embeddings=20, eos_token_id=2, pad_token_id=1, bos_token_id=0,", "= TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\") # change to intended input here input_ids =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "that output attentions can also be changed via the config", "is always last and order is fine inputs_dict[\"output_attentions\"] = True", "attends to `self.attention_window` and one before and one after self.key_length", "encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2],", "prefix: msg = prefix + \": \" + msg raise", "= TFLEDModelTester(self) self.config_tester = ConfigTester(self, 
config_class=LEDConfig) def test_config(self): self.config_tester.run_common_tests() def", "= model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1], output_from_past.shape[1])", "num_global_attn_indices], ) for model_class in self.all_model_classes: inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"]", "= tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None, :]", "select random slice random_slice_idx = int(ids_tensor((1,), output_from_past.shape[-1])) output_from_no_past_slice = output_from_no_past[:,", "\" + msg raise AssertionError(msg) def _long_tensor(tok_lst): return tf.constant(tok_lst, dtype=tf.int32)", "[0, 31414, 232, 328, 740, 1140, 12695, 69]]) inputs_dict =", "pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, ) inputs_dict = prepare_led_inputs_dict(config, input_ids, decoder_input_ids)", "TFLEDModel @require_tf class TFLEDModelTester: config_cls = LEDConfig config_updates = {}", "encoder_seq_length = self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers)", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= max_position_embeddings self.eos_token_id = eos_token_id self.pad_token_id = pad_token_id self.bos_token_id =", "global_attentions = [t.numpy() for t in outputs.encoder_global_attentions] self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) self.assertEqual(len(global_attentions),", "the License. 
import unittest from transformers import LEDConfig, is_tf_available from", "{}\".format(a, b) if prefix: msg = prefix + \": \"", "axis=-1, ) inputs_dict[\"global_attention_mask\"] = global_attention_mask return config, inputs_dict def check_decoder_model_past_large_inputs(self,", "self.pad_token_id = pad_token_id self.bos_token_id = bos_token_id self.attention_window = attention_window #", "= self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() for", "self.model_tester.encoder_seq_length def check_decoder_attentions_output(outputs): decoder_attentions = outputs.decoder_attentions self.assertEqual(len(decoder_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(decoder_attentions[0].shape[-3:]),", "check_decoder_attentions_output(outputs) # Check that output attentions can also be changed", "tf.where( tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], ) config.return_dict =", "Check attention is always last and order is fine inputs_dict[\"output_attentions\"]", "input_ids = input_ids[:1, :] attention_mask = inputs_dict[\"attention_mask\"][:1, :] self.batch_size =", "-> input_ids, which poses a BIG restrictions pass def _assert_tensors_equal(a,", "if a is None and b is None: return True", "a problem with the signature check. 
# Test passes for", "12695, 69]]) inputs_dict = prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output = model(**inputs_dict)[0]", "for model_class in self.all_model_classes: model = model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer)", "self.assertEqual(out_len + (2 if self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True)", "is None def test_attention_outputs(self): config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] =", "self.is_encoder_decoder: model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False)", "def test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self): config, inputs_dict", "from transformers.testing_utils import require_tf, slow from .test_configuration_common import ConfigTester from", "`ModelTesterMixin.test_attention_outputs` is expecting attention tensors to be of size #", "len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model = model_class(config) outputs", "output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0] self.parent.assertEqual(next_tokens.shape[1],", "encoder_seq_length, seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, num_global_attn_indices], ) for", "for TFLEDModel, but not for TFLEDForConditionalGeneration # IMO the reason", "False def setUp(self): self.model_tester = 
TFLEDModelTester(self) self.config_tester = ConfigTester(self, config_class=LEDConfig)", "from transformers import TFLEDForConditionalGeneration, TFLEDModel @require_tf class TFLEDModelTester: config_cls =", "True test_pruning = False def setUp(self): self.model_tester = TFLEDModelTester(self) self.config_tester", "True try: if tf.debugging.assert_near(a, b, atol=atol): return True raise except", "next_input_ids next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size) next_attn_mask = tf.cast(ids_tensor((self.batch_size, 3),", "eos_token_ids=[2], bos_token_id=self.bos_token_id, pad_token_id=self.pad_token_id, decoder_start_token_id=self.pad_token_id, attention_window=self.attention_window, **self.config_updates, ) inputs_dict = prepare_led_inputs_dict(config,", "`test_attention_outputs` and `test_hidden_states_output` tests self.encoder_seq_length = ( self.seq_length + (self.attention_window", "inputs_dict[\"global_attention_mask\"] = tf.where( tf.range(self.model_tester.seq_length)[None, :] < num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], )", "past_key_values[1] # create hypothetical next token and extent to next_input_ids", "model(self._prepare_for_class(inputs_dict, model_class)) out_len = len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder:", "\"License\"); # you may not use this file except in", "config = self.config_cls( vocab_size=self.vocab_size, d_model=self.hidden_size, encoder_layers=self.num_hidden_layers, decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size,", "axis=-1) output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask)[0] output_from_past = model(next_tokens, attention_mask=next_attention_mask, past_key_values=past_key_values)[0]", "for model_class in self.all_model_classes: 
inputs_dict[\"output_attentions\"] = True inputs_dict[\"use_cache\"] = False", "permissions and # limitations under the License. import unittest from", "= True inputs_dict[\"use_cache\"] = False config.output_hidden_states = False model =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_decoder_attentions_output(outputs) #", "self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) # Check attention is always last and", "False config.output_hidden_states = False model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict,", "= prefix + \": \" + msg raise AssertionError(msg) def", "self.max_position_embeddings = max_position_embeddings self.eos_token_id = eos_token_id self.pad_token_id = pad_token_id self.bos_token_id", "and b is None: return True try: if tf.debugging.assert_near(a, b,", "poses a BIG restrictions pass def _assert_tensors_equal(a, b, atol=1e-12, prefix=\"\"):", "decoder_input_ids, attention_mask=None, decoder_attention_mask=None, ): if attention_mask is None: attention_mask =", "self.batch_size = batch_size self.seq_length = seq_length self.is_training = is_training self.use_labels", "inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"]", ".test_modeling_tf_common import TFModelTesterMixin, ids_tensor if is_tf_available(): import tensorflow as tf", "model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) # Check", "a nice Assertion error.\"\"\" if a is None and b", "class TFLEDModelIntegrationTest(unittest.TestCase): def 
test_inference_no_head(self): model = TFLEDForConditionalGeneration.from_pretrained(\"allenai/led-base-16384\").led # change to", "# distributed under the License is distributed on an \"AS", "import tensorflow as tf from transformers import TFLEDForConditionalGeneration, TFLEDModel @require_tf", "# Unless required by applicable law or agreed to in", "232, 328, 740, 1140, 12695, 69]]) inputs_dict = prepare_led_inputs_dict(model.config, input_ids,", "= bos_token_id self.attention_window = attention_window # `ModelTesterMixin.test_attention_outputs` is expecting attention", "decoder_layers=self.num_hidden_layers, encoder_attention_heads=self.num_attention_heads, decoder_attention_heads=self.num_attention_heads, encoder_ffn_dim=self.intermediate_size, decoder_ffn_dim=self.intermediate_size, dropout=self.hidden_dropout_prob, attention_dropout=self.attention_probs_dropout_prob, max_position_embeddings=self.max_position_embeddings, eos_token_ids=[2], bos_token_id=self.bos_token_id,", "768) self.assertEqual(output.shape, expected_shape) # change to expected output here expected_slice", "self.model_tester.prepare_config_and_inputs_for_common() inputs_dict[\"global_attention_mask\"] = tf.zeros_like(inputs_dict[\"attention_mask\"]) num_global_attn_indices = 2 inputs_dict[\"global_attention_mask\"] = tf.where(", "[-1.0186, 0.4586, -2.2043]], ) tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE) def", "= seq_length self.is_training = is_training self.use_labels = use_labels self.vocab_size =", "TFLEDModelTester(self) self.config_tester = ConfigTester(self, config_class=LEDConfig) def test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self):", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self): # longformer", 
"prepare_led_inputs_dict(config, input_ids, decoder_input_ids) global_attention_mask = tf.concat( [tf.zeros_like(input_ids)[:, :-1], tf.ones_like(input_ids)[:, -1:]],", "assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x = model.get_output_layer_with_bias() assert x is None", "global_attention_mask return config, inputs_dict def check_decoder_model_past_large_inputs(self, config, inputs_dict): model =", "TFLEDForConditionalGeneration # IMO the reason is that the tensor variable", "< num_global_attn_indices, 1, inputs_dict[\"global_attention_mask\"], ) config.return_dict = True seq_length =", "len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow def test_saved_model_with_attentions_output(self): # longformer has", "You may obtain a copy of the License at #", "to be of size # [num_attention_heads, encoder_seq_length, encoder_key_length], but TFLongformerSelfAttention", "= ids_tensor([self.batch_size, self.seq_length - 1], self.vocab_size) eos_tensor = tf.expand_dims(tf.constant([self.eos_token_id] *", "changed # from decoder_input_ids -> input_ids, which poses a BIG", "): self.parent = parent self.batch_size = batch_size self.seq_length = seq_length", "* [0, 31414, 232, 328, 740, 1140, 12695, 69]]) inputs_dict", "+ 1 # because of padding `encoder_seq_length`, is different from", "test_config(self): self.config_tester.run_common_tests() def test_decoder_model_past_large_inputs(self): config_and_inputs = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.check_decoder_model_past_large_inputs(*config_and_inputs) def test_model_common_attributes(self):", "`seq_length`. 
Relevant for # the `test_attention_outputs` and `test_hidden_states_output` tests self.encoder_seq_length", "token and extent to next_input_ids next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size)", "model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len + (2 if self.is_encoder_decoder else 1), len(outputs))", "if self.is_encoder_decoder else 1), len(outputs)) self.assertEqual(model.config.output_hidden_states, True) check_encoder_attentions_output(outputs) @slow def", "input_ids[:1, :] attention_mask = inputs_dict[\"attention_mask\"][:1, :] self.batch_size = 1 #", "input_ids, which poses a BIG restrictions pass def _assert_tensors_equal(a, b,", "self.assertEqual(len(global_attentions), self.model_tester.num_hidden_layers) self.assertListEqual( list(attentions[0].shape[-3:]), [self.model_tester.num_attention_heads, encoder_seq_length, seq_length], ) self.assertListEqual( list(global_attentions[0].shape[-3:]),", "1024, model.config.vocab_size) self.assertEqual(output.shape, expected_shape) # change to expected output here", "740, 1140, 12695, 69]]) inputs_dict = prepare_led_inputs_dict(model.config, input_ids, decoder_input_ids) output", "the Apache License, Version 2.0 (the \"License\"); # you may", "# limitations under the License. 
import unittest from transformers import", "True model = model_class(config) outputs = model(self._prepare_for_class(inputs_dict, model_class)) self.assertEqual(out_len +", "in self.all_model_classes: model = model_class(config) assert isinstance(model.get_input_embeddings(), tf.keras.layers.Layer) x =", "its local attention only attends to `self.attention_window` and one before", "if attention_mask is None: attention_mask = tf.cast(tf.math.not_equal(input_ids, config.pad_token_id), tf.int8) if", "attentions can also be changed via the config del inputs_dict[\"output_attentions\"]", "out_len = len(outputs) self.assertEqual(config.output_hidden_states, False) check_encoder_attentions_output(outputs) if self.is_encoder_decoder: model =", "2.8279, 0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]], ) tf.debugging.assert_near(output[:,", "local attention only attends to `self.attention_window` and one before and" ]
[ "x: np.sin(np.pi*x)), ] enabled_act_functions = available_act_functions def get_measurements(self, weights, x,", "scores in x. Returns: softmax - softmax normalized in dim", "y_raw y_pred = np.argmax(y_raw, axis=-1) y_prob = softmax(y_raw, axis=-1) if", "np.abs(x)), ('cos', lambda x: np.cos(np.pi*x)), ('sin ', lambda x: np.sin(np.pi*x)),", "return function names available_funcs[func][0] for func in selected_funcs ]) class", "return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for raw in y_raw ])", "logging from wann_genetic.individual.network_base import BaseFFNN def softmax(x, axis=-1): \"\"\"Compute softmax", "= 1 # bias # propagate signal through all layers", "pred in y_pred ]) return return_values def activation_functions(self, nodes, x=None):", "x: np.tanh(x)), ('gaussian (standard)', lambda x: np.exp(-np.multiply(x, x) / 2.0)),", "y_raw = act_vec[..., -self.n_out:] return self.measurements_from_output(y_raw, y_true, measures) def measurements_from_output(self,", "\"\"\"Apply updates for active nodes (active nodes can't share edges).", "np.exp(-np.multiply(x, x) / 2.0)), ('step', lambda x: 1.0*(x>0.0)), ('identity', lambda", "np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for pred in y_pred ]) return return_values", "y_raw = y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob = y_prob.reshape(y_raw.shape[0], -1, self.n_out)", "with base weights M3d = M[None, :, :] * base_weights[:,", "act_sums = np.matmul(x3d, M3d) + add_to_sum # apply activation function", "np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for raw in y_raw ]) if 'accuracy'", "the same function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. 
\"\"\" if x is", "weights, x, y_true=None, measures=['predictions']): assert len(x.shape) == 2 # multiple", "samples, source nodes # M3d: weights, source, target # multiply", "return s def apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply the activation function", "y_pred ]) if 'kappa' in measures: return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true,", "isinstance(weights, np.ndarray) # initial activations act_vec = np.empty((weights.shape[0], x.shape[0], self.n_nodes),", "= softmax(y_raw, axis=-1) if 'probabilities' in measures: return_values['probabilities'] = y_prob", "is not None: result = np.empty(x.shape) for i, func in", "= x[..., :addend_nodes] act_sums = np.matmul(x3d, M3d) + add_to_sum #", "return self.measurements_from_output(y_raw, y_true, measures) def measurements_from_output(self, y_raw, y_true, measures): return_values", "import numpy as np import sklearn import logging from wann_genetic.individual.network_base", "2.0)), ('step', lambda x: 1.0*(x>0.0)), ('identity', lambda x: x), ('inverse',", "np.matmul(x3d, M3d) + add_to_sum # apply activation function for active", "x)), ('sigmoid', lambda x: (np.tanh(x/2.0) + 1.0)/2.0), ('tanh', lambda x:", "softmax(y_raw, axis=-1) if 'probabilities' in measures: return_values['probabilities'] = y_prob if", "propagation works, see :doc:`numpy_network`. 
\"\"\" # Definition of the activations", "= available_funcs[func][1](x[..., i]) return result else: return np.array([ # return", "self.n_in] = 1 # bias # propagate signal through all", "y_true is not None: y_true = y_true.reshape(-1) if 'log_loss' in", "lambda x: x), ('inverse', lambda x: -x), ('squared', lambda x:", "y_raw ]) if 'accuracy' in measures: return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true,", "return_values['predictions'] = y_pred y_raw = y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob =", "\"\"\"Apply the activation function of the selected nodes to their", "= self.nodes['func'][nodes - self.offset] return apply_act_function(self.enabled_act_functions, funcs, x) def calc_act(self,", "# if any node is nan, we cant rely on", "weights, samples, source nodes # M3d: weights, source, target #", "dict() if 'raw' in measures: return_values['raw'] = y_raw y_pred =", "nodes (active nodes can't share edges). \"\"\" addend_nodes = active_nodes[0]", "predicted y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out))", "('inverse', lambda x: -x), ('squared', lambda x: x**2), # unstable", "sklearn.metrics.mean_squared_error(y_true, raw) for raw in y_raw ]) if 'accuracy' in", "result = np.empty(x.shape) for i, func in enumerate(selected_funcs): assert func", "sums. This fullfils the same function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. 
\"\"\"", "Network(BaseFFNN): \"\"\"Numpy implmentation of a Feed Forward Neural Network For", "measures: return_values['raw'] = y_raw y_pred = np.argmax(y_raw, axis=-1) y_prob =", "]) if 'kappa' in measures: return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred)", "M3d: weights, source, target # multiply relevant weight matrix with", "as np import sklearn import logging from wann_genetic.individual.network_base import BaseFFNN", "return_values['probabilities'] = y_prob if 'predictions' in measures: return_values['predictions'] = y_pred", "/ 2.0)), ('step', lambda x: 1.0*(x>0.0)), ('identity', lambda x: x),", "x=None): funcs = self.nodes['func'][nodes - self.offset] return apply_act_function(self.enabled_act_functions, funcs, x)", "# x3d: weights, samples, source nodes # M3d: weights, source,", "return result else: return np.array([ # return function names available_funcs[func][0]", "x=None): \"\"\"Apply the activation function of the selected nodes to", "their sums. This fullfils the same function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`.", "lambda x: np.sin(np.pi*x)), ] enabled_act_functions = available_act_functions def get_measurements(self, weights,", "# apply activation function for active nodes return self.activation_functions(active_nodes, act_sums)", "(standard)', lambda x: np.exp(-np.multiply(x, x) / 2.0)), ('step', lambda x:", "np.sin(np.pi*x)), ] enabled_act_functions = available_act_functions def get_measurements(self, weights, x, y_true=None,", "axis=-1) y_prob = softmax(y_raw, axis=-1) if 'probabilities' in measures: return_values['probabilities']", "implmentation of a Feed Forward Neural Network For an explanation", ":doc:`numpy_network`. \"\"\" # Definition of the activations functions available_act_functions =", "2 # multiple one dimensional input arrays assert isinstance(weights, np.ndarray)", "x. 
Returns: softmax - softmax normalized in dim axis \"\"\"", "of how propagation works, see :doc:`numpy_network`. \"\"\" # Definition of", "explanation of how propagation works, see :doc:`numpy_network`. \"\"\" # Definition", "x: np.exp(-np.multiply(x, x) / 2.0)), ('step', lambda x: 1.0*(x>0.0)), ('identity',", "is nan, we cant rely on the result valid =", "\"\"\" addend_nodes = active_nodes[0] M = self.weight_matrix[:addend_nodes, active_nodes - self.offset]", "]) if 'accuracy' in measures: return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true, pred)", "= M[None, :, :] * base_weights[:, None, None] x3d =", "e_x = np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis)) s = (e_x /", "in measures: return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true, pred) for pred in", "the activations functions available_act_functions = [ ('relu', lambda x: np.maximum(0,", "self.n_nodes), dtype=float) act_vec[..., :self.n_in] = x[...] act_vec[..., self.n_in] = 1", "in x. Returns: softmax - softmax normalized in dim axis", "lambda x: (np.tanh(x/2.0) + 1.0)/2.0), ('tanh', lambda x: np.tanh(x)), ('gaussian", "('tanh', lambda x: np.tanh(x)), ('gaussian (standard)', lambda x: np.exp(-np.multiply(x, x)", "x3d = x[..., :addend_nodes] act_sums = np.matmul(x3d, M3d) + add_to_sum", "# return function names available_funcs[func][0] for func in selected_funcs ])", "\"\"\"Numpy implmentation of a Feed Forward Neural Network For an", ":addend_nodes] act_sums = np.matmul(x3d, M3d) + add_to_sum # apply activation", "axis=-1) if 'probabilities' in measures: return_values['probabilities'] = y_prob if 'predictions'", "x: x**2), # unstable if applied multiple times ('abs', lambda", "of the selected nodes to their sums. 
This fullfils the", "layers for active_nodes in self.layers(): act_vec[..., active_nodes] = self.calc_act(act_vec, active_nodes,", "in enumerate(selected_funcs): assert func < len(available_funcs) result[..., i] = available_funcs[func][1](x[...,", "prob in y_prob ]) if 'mse_loss' in measures: return_values['mse_loss'] =", "not None: result = np.empty(x.shape) for i, func in enumerate(selected_funcs):", "lambda x: np.exp(-np.multiply(x, x) / 2.0)), ('step', lambda x: 1.0*(x>0.0)),", "x: (np.tanh(x/2.0) + 1.0)/2.0), ('tanh', lambda x: np.tanh(x)), ('gaussian (standard)',", "dimensional input arrays assert isinstance(weights, np.ndarray) # initial activations act_vec", "in y_raw ]) if 'accuracy' in measures: return_values['accuracy'] = np.array([", "x, active_nodes, base_weights, add_to_sum=0): \"\"\"Apply updates for active nodes (active", "= available_act_functions def get_measurements(self, weights, x, y_true=None, measures=['predictions']): assert len(x.shape)", "source, target # multiply relevant weight matrix with base weights", "sklearn.metrics.cohen_kappa_score(y_true, pred) for pred in y_pred ]) return return_values def", "multiple one dimensional input arrays assert isinstance(weights, np.ndarray) # initial", "axis=-1) act_vec[~valid, :] = np.nan y_raw = act_vec[..., -self.n_out:] return", "active nodes (active nodes can't share edges). \"\"\" addend_nodes =", "result[..., i] = available_funcs[func][1](x[..., i]) return result else: return np.array([", "act_vec = np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float) act_vec[..., :self.n_in] = x[...]", "if 'probabilities' in measures: return_values['probabilities'] = y_prob if 'predictions' in", "wann_genetic.individual.network_base import BaseFFNN def softmax(x, axis=-1): \"\"\"Compute softmax values for", "1.0*(x>0.0)), ('identity', lambda x: x), ('inverse', lambda x: -x), ('squared',", "= x[...] 
act_vec[..., self.n_in] = 1 # bias # propagate", "y_pred = y_pred.reshape(y_raw.shape[0], -1) if y_true is not None: y_true", "self.offset] return apply_act_function(self.enabled_act_functions, funcs, x) def calc_act(self, x, active_nodes, base_weights,", "active_nodes, base_weights, add_to_sum=0): \"\"\"Apply updates for active nodes (active nodes", "= y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred = y_pred.reshape(y_raw.shape[0], -1) if y_true", "valid = np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :] = np.nan y_raw =", "of scores in x. Returns: softmax - softmax normalized in", "len(x.shape) == 2 # multiple one dimensional input arrays assert", "selected nodes to their sums. This fullfils the same function", "None: y_true = y_true.reshape(-1) if 'log_loss' in measures: # nan", "y_prob ]) if 'mse_loss' in measures: return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true,", "funcs, x) def calc_act(self, x, active_nodes, base_weights, add_to_sum=0): \"\"\"Apply updates", "= np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for raw in y_raw ]) if", "the result valid = np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :] = np.nan", "-1, self.n_out) y_prob = y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred = y_pred.reshape(y_raw.shape[0],", "M = self.weight_matrix[:addend_nodes, active_nodes - self.offset] # x3d: weights, samples,", "of the activations functions available_act_functions = [ ('relu', lambda x:", "if 'log_loss' in measures: # nan is same as maximally", "return return_values def activation_functions(self, nodes, x=None): funcs = self.nodes['func'][nodes -", "see :doc:`numpy_network`. 
\"\"\" # Definition of the activations functions available_act_functions", "add_to_sum # apply activation function for active nodes return self.activation_functions(active_nodes,", "base weights M3d = M[None, :, :] * base_weights[:, None,", "i] = available_funcs[func][1](x[..., i]) return result else: return np.array([ #", "== 2 # multiple one dimensional input arrays assert isinstance(weights,", "measures: # nan is same as maximally falsely predicted y_prob[~np.isfinite(y_prob)]", "in measures: return_values['raw'] = y_raw y_pred = np.argmax(y_raw, axis=-1) y_prob", "pred) for pred in y_pred ]) return return_values def activation_functions(self,", "for func in selected_funcs ]) class Network(BaseFFNN): \"\"\"Numpy implmentation of", "For an explanation of how propagation works, see :doc:`numpy_network`. \"\"\"", "('sin ', lambda x: np.sin(np.pi*x)), ] enabled_act_functions = available_act_functions def", "assert isinstance(weights, np.ndarray) # initial activations act_vec = np.empty((weights.shape[0], x.shape[0],", "x: -x), ('squared', lambda x: x**2), # unstable if applied", "axis \"\"\" e_x = np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis)) s =", "- np.expand_dims(np.max(x,axis=axis), axis=axis)) s = (e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis)) return", "numpy as np import sklearn import logging from wann_genetic.individual.network_base import", "y_true = y_true.reshape(-1) if 'log_loss' in measures: # nan is", "func in enumerate(selected_funcs): assert func < len(available_funcs) result[..., i] =", "= np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float) act_vec[..., :self.n_in] = x[...] act_vec[...,", "for pred in y_pred ]) if 'kappa' in measures: return_values['kappa']", "(active nodes can't share edges). \"\"\" addend_nodes = active_nodes[0] M", "activation function of the selected nodes to their sums. 
This", "functions available_act_functions = [ ('relu', lambda x: np.maximum(0, x)), ('sigmoid',", "normalized in dim axis \"\"\" e_x = np.exp(x - np.expand_dims(np.max(x,axis=axis),", "def calc_act(self, x, active_nodes, base_weights, add_to_sum=0): \"\"\"Apply updates for active", "if y_true is not None: y_true = y_true.reshape(-1) if 'log_loss'", "None] x3d = x[..., :addend_nodes] act_sums = np.matmul(x3d, M3d) +", "how propagation works, see :doc:`numpy_network`. \"\"\" # Definition of the", "if 'kappa' in measures: return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for", "axis=axis)) return s def apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply the activation", "nodes to their sums. This fullfils the same function as", "+ 1.0)/2.0), ('tanh', lambda x: np.tanh(x)), ('gaussian (standard)', lambda x:", "axis=axis)) s = (e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s def", "for pred in y_pred ]) return return_values def activation_functions(self, nodes,", "active_nodes, weights) # if any node is nan, we cant", "in dim axis \"\"\" e_x = np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis))", "rely on the result valid = np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :]", "any node is nan, we cant rely on the result", "activation_functions(self, nodes, x=None): funcs = self.nodes['func'][nodes - self.offset] return apply_act_function(self.enabled_act_functions,", "+ add_to_sum # apply activation function for active nodes return", "def activation_functions(self, nodes, x=None): funcs = self.nodes['func'][nodes - self.offset] return", "# nan is same as maximally falsely predicted y_prob[~np.isfinite(y_prob)] =", ":] = np.nan y_raw = act_vec[..., -self.n_out:] return self.measurements_from_output(y_raw, y_true,", "self.nodes['func'][nodes - self.offset] return apply_act_function(self.enabled_act_functions, funcs, x) def calc_act(self, x,", ":self.n_in] = x[...] 
act_vec[..., self.n_in] = 1 # bias #", "return_values def activation_functions(self, nodes, x=None): funcs = self.nodes['func'][nodes - self.offset]", "np.tanh(x)), ('gaussian (standard)', lambda x: np.exp(-np.multiply(x, x) / 2.0)), ('step',", "np import sklearn import logging from wann_genetic.individual.network_base import BaseFFNN def", "activations act_vec = np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float) act_vec[..., :self.n_in] =", ":, :] * base_weights[:, None, None] x3d = x[..., :addend_nodes]", "node is nan, we cant rely on the result valid", "enabled_act_functions = available_act_functions def get_measurements(self, weights, x, y_true=None, measures=['predictions']): assert", "selected_funcs ]) class Network(BaseFFNN): \"\"\"Numpy implmentation of a Feed Forward", "raw) for raw in y_raw ]) if 'accuracy' in measures:", "np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :] = np.nan y_raw = act_vec[..., -self.n_out:]", "np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis)) s = (e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis))", "x: np.cos(np.pi*x)), ('sin ', lambda x: np.sin(np.pi*x)), ] enabled_act_functions =", "calc_act(self, x, active_nodes, base_weights, add_to_sum=0): \"\"\"Apply updates for active nodes", "]) if 'mse_loss' in measures: return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true, raw)", "available_act_functions def get_measurements(self, weights, x, y_true=None, measures=['predictions']): assert len(x.shape) ==", "y_prob = y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred = y_pred.reshape(y_raw.shape[0], -1) if", "- self.offset] # x3d: weights, samples, source nodes # M3d:", "active_nodes in self.layers(): act_vec[..., active_nodes] = self.calc_act(act_vec, active_nodes, weights) #", "function of the selected nodes to their sums. 
This fullfils", "funcs = self.nodes['func'][nodes - self.offset] return apply_act_function(self.enabled_act_functions, funcs, x) def", "= [ ('relu', lambda x: np.maximum(0, x)), ('sigmoid', lambda x:", "x[...] act_vec[..., self.n_in] = 1 # bias # propagate signal", "self.layers(): act_vec[..., active_nodes] = self.calc_act(act_vec, active_nodes, weights) # if any", "= np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for pred in y_pred ]) return", "(e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s def apply_act_function(available_funcs, selected_funcs, x=None):", "measures: return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true, pred) for pred in y_pred", "y_prob if 'predictions' in measures: return_values['predictions'] = y_pred y_raw =", "of a Feed Forward Neural Network For an explanation of", "y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob = y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred =", "import logging from wann_genetic.individual.network_base import BaseFFNN def softmax(x, axis=-1): \"\"\"Compute", "in measures: return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for raw in", "y_prob = softmax(y_raw, axis=-1) if 'probabilities' in measures: return_values['probabilities'] =", "# initial activations act_vec = np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float) act_vec[...,", "s = (e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s def apply_act_function(available_funcs,", "share edges). \"\"\" addend_nodes = active_nodes[0] M = self.weight_matrix[:addend_nodes, active_nodes", "works, see :doc:`numpy_network`. 
\"\"\" # Definition of the activations functions", "'probabilities' in measures: return_values['probabilities'] = y_prob if 'predictions' in measures:", "x) def calc_act(self, x, active_nodes, base_weights, add_to_sum=0): \"\"\"Apply updates for", "result else: return np.array([ # return function names available_funcs[func][0] for", "('gaussian (standard)', lambda x: np.exp(-np.multiply(x, x) / 2.0)), ('step', lambda", "y_raw, y_true, measures): return_values = dict() if 'raw' in measures:", "addend_nodes = active_nodes[0] M = self.weight_matrix[:addend_nodes, active_nodes - self.offset] #", "unstable if applied multiple times ('abs', lambda x: np.abs(x)), ('cos',", "s def apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply the activation function of", "activations functions available_act_functions = [ ('relu', lambda x: np.maximum(0, x)),", "target # multiply relevant weight matrix with base weights M3d", "This fullfils the same function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. \"\"\" if", "the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. 
\"\"\" if x is not None: result =", "propagate signal through all layers for active_nodes in self.layers(): act_vec[...,", "weights) # if any node is nan, we cant rely", "relevant weight matrix with base weights M3d = M[None, :,", "= np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis)) s = (e_x / np.expand_dims(e_x.sum(axis=-1),", "x is not None: result = np.empty(x.shape) for i, func", "np.nan y_raw = act_vec[..., -self.n_out:] return self.measurements_from_output(y_raw, y_true, measures) def", "= dict() if 'raw' in measures: return_values['raw'] = y_raw y_pred", "M3d = M[None, :, :] * base_weights[:, None, None] x3d", "\"\"\" # Definition of the activations functions available_act_functions = [", "', lambda x: np.sin(np.pi*x)), ] enabled_act_functions = available_act_functions def get_measurements(self,", "apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply the activation function of the selected", "= np.matmul(x3d, M3d) + add_to_sum # apply activation function for", "bias # propagate signal through all layers for active_nodes in", "x.shape[0], self.n_nodes), dtype=float) act_vec[..., :self.n_in] = x[...] 
act_vec[..., self.n_in] =", "-1) if y_true is not None: y_true = y_true.reshape(-1) if", "<filename>src/wann_genetic/individual/numpy/ffnn.py import numpy as np import sklearn import logging from", "nodes, x=None): funcs = self.nodes['func'][nodes - self.offset] return apply_act_function(self.enabled_act_functions, funcs,", "initial activations act_vec = np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float) act_vec[..., :self.n_in]", "= np.argmax(y_raw, axis=-1) y_prob = softmax(y_raw, axis=-1) if 'probabilities' in", "active_nodes] = self.calc_act(act_vec, active_nodes, weights) # if any node is", "np.maximum(0, x)), ('sigmoid', lambda x: (np.tanh(x/2.0) + 1.0)/2.0), ('tanh', lambda", "i]) return result else: return np.array([ # return function names", "('step', lambda x: 1.0*(x>0.0)), ('identity', lambda x: x), ('inverse', lambda", "act_vec[..., active_nodes] = self.calc_act(act_vec, active_nodes, weights) # if any node", "dim axis \"\"\" e_x = np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis)) s", "('abs', lambda x: np.abs(x)), ('cos', lambda x: np.cos(np.pi*x)), ('sin ',", "pred) for pred in y_pred ]) if 'kappa' in measures:", "return np.array([ # return function names available_funcs[func][0] for func in", "cant rely on the result valid = np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid,", "= active_nodes[0] M = self.weight_matrix[:addend_nodes, active_nodes - self.offset] # x3d:", "softmax(x, axis=-1): \"\"\"Compute softmax values for each sets of scores", "each sets of scores in x. 
Returns: softmax - softmax", "lambda x: x**2), # unstable if applied multiple times ('abs',", "y_true=None, measures=['predictions']): assert len(x.shape) == 2 # multiple one dimensional", "y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred = y_pred.reshape(y_raw.shape[0], -1) if y_true is", "in y_prob ]) if 'mse_loss' in measures: return_values['mse_loss'] = np.array([", "measures: return_values['probabilities'] = y_prob if 'predictions' in measures: return_values['predictions'] =", "for raw in y_raw ]) if 'accuracy' in measures: return_values['accuracy']", "]) return return_values def activation_functions(self, nodes, x=None): funcs = self.nodes['func'][nodes", "= np.empty(x.shape) for i, func in enumerate(selected_funcs): assert func <", "in measures: return_values['predictions'] = y_pred y_raw = y_raw.reshape(y_raw.shape[0], -1, self.n_out)", "if any node is nan, we cant rely on the", "through all layers for active_nodes in self.layers(): act_vec[..., active_nodes] =", "function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. \"\"\" if x is not None:", "'log_loss' in measures: # nan is same as maximally falsely", "act_vec[..., self.n_in] = 1 # bias # propagate signal through", "is not None: y_true = y_true.reshape(-1) if 'log_loss' in measures:", "BaseFFNN def softmax(x, axis=-1): \"\"\"Compute softmax values for each sets", "] enabled_act_functions = available_act_functions def get_measurements(self, weights, x, y_true=None, measures=['predictions']):", "class Network(BaseFFNN): \"\"\"Numpy implmentation of a Feed Forward Neural Network", "nodes # M3d: weights, source, target # multiply relevant weight", "# M3d: weights, source, target # multiply relevant weight matrix", "x[..., :addend_nodes] act_sums = np.matmul(x3d, M3d) + add_to_sum # apply", "dtype=float) act_vec[..., :self.n_in] = x[...] 
act_vec[..., self.n_in] = 1 #", "= np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for prob in y_prob ])", "all layers for active_nodes in self.layers(): act_vec[..., active_nodes] = self.calc_act(act_vec,", "= y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob = y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred", "measures) def measurements_from_output(self, y_raw, y_true, measures): return_values = dict() if", "M[None, :, :] * base_weights[:, None, None] x3d = x[...,", "softmax - softmax normalized in dim axis \"\"\" e_x =", "axis=-1): \"\"\"Compute softmax values for each sets of scores in", "# Definition of the activations functions available_act_functions = [ ('relu',", "x: 1.0*(x>0.0)), ('identity', lambda x: x), ('inverse', lambda x: -x),", "'accuracy' in measures: return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true, pred) for pred", "np.cos(np.pi*x)), ('sin ', lambda x: np.sin(np.pi*x)), ] enabled_act_functions = available_act_functions", "in y_pred ]) if 'kappa' in measures: return_values['kappa'] = np.array([", "lambda x: np.cos(np.pi*x)), ('sin ', lambda x: np.sin(np.pi*x)), ] enabled_act_functions", "y_pred y_raw = y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob = y_prob.reshape(y_raw.shape[0], -1,", "active_nodes[0] M = self.weight_matrix[:addend_nodes, active_nodes - self.offset] # x3d: weights,", "# multiply relevant weight matrix with base weights M3d =", "def measurements_from_output(self, y_raw, y_true, measures): return_values = dict() if 'raw'", "-1, self.n_out) y_pred = y_pred.reshape(y_raw.shape[0], -1) if y_true is not", "< len(available_funcs) result[..., i] = available_funcs[func][1](x[..., i]) return result else:", "act_vec[..., :self.n_in] = x[...] 
act_vec[..., self.n_in] = 1 # bias", "return apply_act_function(self.enabled_act_functions, funcs, x) def calc_act(self, x, active_nodes, base_weights, add_to_sum=0):", "for i, func in enumerate(selected_funcs): assert func < len(available_funcs) result[...,", "result valid = np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :] = np.nan y_raw", "weight matrix with base weights M3d = M[None, :, :]", "'raw' in measures: return_values['raw'] = y_raw y_pred = np.argmax(y_raw, axis=-1)", "self.weight_matrix[:addend_nodes, active_nodes - self.offset] # x3d: weights, samples, source nodes", "np.ndarray) # initial activations act_vec = np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float)", "return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for prob in y_prob", "x3d: weights, samples, source nodes # M3d: weights, source, target", "None: result = np.empty(x.shape) for i, func in enumerate(selected_funcs): assert", "as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. \"\"\" if x is not None: result", "- softmax normalized in dim axis \"\"\" e_x = np.exp(x", "fullfils the same function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. \"\"\" if x", "for each sets of scores in x. Returns: softmax -", "np.expand_dims(np.max(x,axis=axis), axis=axis)) s = (e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s", "x) / 2.0)), ('step', lambda x: 1.0*(x>0.0)), ('identity', lambda x:", "self.measurements_from_output(y_raw, y_true, measures) def measurements_from_output(self, y_raw, y_true, measures): return_values =", "to their sums. 
This fullfils the same function as the", "assert len(x.shape) == 2 # multiple one dimensional input arrays", "in selected_funcs ]) class Network(BaseFFNN): \"\"\"Numpy implmentation of a Feed", "if 'predictions' in measures: return_values['predictions'] = y_pred y_raw = y_raw.reshape(y_raw.shape[0],", "np.empty(x.shape) for i, func in enumerate(selected_funcs): assert func < len(available_funcs)", "measures: return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for raw in y_raw", "Definition of the activations functions available_act_functions = [ ('relu', lambda", "np.argmax(y_raw, axis=-1) y_prob = softmax(y_raw, axis=-1) if 'probabilities' in measures:", "arrays assert isinstance(weights, np.ndarray) # initial activations act_vec = np.empty((weights.shape[0],", "active_nodes - self.offset] # x3d: weights, samples, source nodes #", "weights M3d = M[None, :, :] * base_weights[:, None, None]", "measures=['predictions']): assert len(x.shape) == 2 # multiple one dimensional input", "x: np.maximum(0, x)), ('sigmoid', lambda x: (np.tanh(x/2.0) + 1.0)/2.0), ('tanh',", "'mse_loss' in measures: return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for raw", "for active nodes (active nodes can't share edges). \"\"\" addend_nodes", "if applied multiple times ('abs', lambda x: np.abs(x)), ('cos', lambda", "one dimensional input arrays assert isinstance(weights, np.ndarray) # initial activations", ":class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. 
\"\"\" if x is not None: result = np.empty(x.shape)", "x: x), ('inverse', lambda x: -x), ('squared', lambda x: x**2),", "= np.array([ sklearn.metrics.accuracy_score(y_true, pred) for pred in y_pred ]) if", "if 'raw' in measures: return_values['raw'] = y_raw y_pred = np.argmax(y_raw,", "Forward Neural Network For an explanation of how propagation works,", "as maximally falsely predicted y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss'] = np.array([", "('identity', lambda x: x), ('inverse', lambda x: -x), ('squared', lambda", "= np.nan y_raw = act_vec[..., -self.n_out:] return self.measurements_from_output(y_raw, y_true, measures)", "= 0 return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for prob", "sklearn.metrics.accuracy_score(y_true, pred) for pred in y_pred ]) if 'kappa' in", "= y_raw y_pred = np.argmax(y_raw, axis=-1) y_prob = softmax(y_raw, axis=-1)", "base_weights, add_to_sum=0): \"\"\"Apply updates for active nodes (active nodes can't", "* base_weights[:, None, None] x3d = x[..., :addend_nodes] act_sums =", "np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s def apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply the", "nodes can't share edges). 
\"\"\" addend_nodes = active_nodes[0] M =", "matrix with base weights M3d = M[None, :, :] *", "pred in y_pred ]) if 'kappa' in measures: return_values['kappa'] =", "applied multiple times ('abs', lambda x: np.abs(x)), ('cos', lambda x:", "y_true.reshape(-1) if 'log_loss' in measures: # nan is same as", "('cos', lambda x: np.cos(np.pi*x)), ('sin ', lambda x: np.sin(np.pi*x)), ]", "def apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply the activation function of the", "measurements_from_output(self, y_raw, y_true, measures): return_values = dict() if 'raw' in", "y_pred ]) return return_values def activation_functions(self, nodes, x=None): funcs =", "'predictions' in measures: return_values['predictions'] = y_pred y_raw = y_raw.reshape(y_raw.shape[0], -1,", "# multiple one dimensional input arrays assert isinstance(weights, np.ndarray) #", "labels=np.arange(self.n_out)) for prob in y_prob ]) if 'mse_loss' in measures:", "raw in y_raw ]) if 'accuracy' in measures: return_values['accuracy'] =", "available_funcs[func][1](x[..., i]) return result else: return np.array([ # return function", "input arrays assert isinstance(weights, np.ndarray) # initial activations act_vec =", "\"\"\" e_x = np.exp(x - np.expand_dims(np.max(x,axis=axis), axis=axis)) s = (e_x", "0 return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for prob in", "def get_measurements(self, weights, x, y_true=None, measures=['predictions']): assert len(x.shape) == 2", "measures: return_values['predictions'] = y_pred y_raw = y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob", "(np.tanh(x/2.0) + 1.0)/2.0), ('tanh', lambda x: np.tanh(x)), ('gaussian (standard)', lambda", "y_true, measures): return_values = dict() if 'raw' in measures: return_values['raw']", "[ ('relu', lambda x: np.maximum(0, x)), ('sigmoid', lambda x: (np.tanh(x/2.0)", "base_weights[:, None, None] x3d = x[..., :addend_nodes] act_sums = np.matmul(x3d,", "Network For 
an explanation of how propagation works, see :doc:`numpy_network`.", "in measures: # nan is same as maximally falsely predicted", "multiply relevant weight matrix with base weights M3d = M[None,", "= y_prob if 'predictions' in measures: return_values['predictions'] = y_pred y_raw", "for prob in y_prob ]) if 'mse_loss' in measures: return_values['mse_loss']", "= (e_x / np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s def apply_act_function(available_funcs, selected_funcs,", "selected_funcs, x=None): \"\"\"Apply the activation function of the selected nodes", "get_measurements(self, weights, x, y_true=None, measures=['predictions']): assert len(x.shape) == 2 #", "maximally falsely predicted y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true,", "in measures: return_values['probabilities'] = y_prob if 'predictions' in measures: return_values['predictions']", "('squared', lambda x: x**2), # unstable if applied multiple times", "import sklearn import logging from wann_genetic.individual.network_base import BaseFFNN def softmax(x,", "if 'accuracy' in measures: return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true, pred) for", "func in selected_funcs ]) class Network(BaseFFNN): \"\"\"Numpy implmentation of a", "sets of scores in x. Returns: softmax - softmax normalized", "lambda x: np.abs(x)), ('cos', lambda x: np.cos(np.pi*x)), ('sin ', lambda", "Neural Network For an explanation of how propagation works, see", "sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for prob in y_prob ]) if 'mse_loss'", "updates for active nodes (active nodes can't share edges). 
\"\"\"", "= y_pred.reshape(y_raw.shape[0], -1) if y_true is not None: y_true =", "apply_act_function(self.enabled_act_functions, funcs, x) def calc_act(self, x, active_nodes, base_weights, add_to_sum=0): \"\"\"Apply", "if x is not None: result = np.empty(x.shape) for i,", "\"\"\" if x is not None: result = np.empty(x.shape) for", "x, y_true=None, measures=['predictions']): assert len(x.shape) == 2 # multiple one", "measures: return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for pred in y_pred", "None, None] x3d = x[..., :addend_nodes] act_sums = np.matmul(x3d, M3d)", "# propagate signal through all layers for active_nodes in self.layers():", "1.0)/2.0), ('tanh', lambda x: np.tanh(x)), ('gaussian (standard)', lambda x: np.exp(-np.multiply(x,", "return_values['raw'] = y_raw y_pred = np.argmax(y_raw, axis=-1) y_prob = softmax(y_raw,", "import BaseFFNN def softmax(x, axis=-1): \"\"\"Compute softmax values for each", "self.calc_act(act_vec, active_nodes, weights) # if any node is nan, we", "y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for", "np.array([ sklearn.metrics.accuracy_score(y_true, pred) for pred in y_pred ]) if 'kappa'", "x: np.abs(x)), ('cos', lambda x: np.cos(np.pi*x)), ('sin ', lambda x:", "in self.layers(): act_vec[..., active_nodes] = self.calc_act(act_vec, active_nodes, weights) # if", "from wann_genetic.individual.network_base import BaseFFNN def softmax(x, axis=-1): \"\"\"Compute softmax values", "available_funcs[func][0] for func in selected_funcs ]) class Network(BaseFFNN): \"\"\"Numpy implmentation", "def softmax(x, axis=-1): \"\"\"Compute softmax values for each sets of", "a Feed Forward Neural Network For an explanation of how", "return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for pred in y_pred ])", "multiple times ('abs', lambda x: np.abs(x)), ('cos', lambda x: np.cos(np.pi*x)),", 
"lambda x: -x), ('squared', lambda x: x**2), # unstable if", "assert func < len(available_funcs) result[..., i] = available_funcs[func][1](x[..., i]) return", "for active_nodes in self.layers(): act_vec[..., active_nodes] = self.calc_act(act_vec, active_nodes, weights)", "measures): return_values = dict() if 'raw' in measures: return_values['raw'] =", "sklearn import logging from wann_genetic.individual.network_base import BaseFFNN def softmax(x, axis=-1):", "the activation function of the selected nodes to their sums.", "in measures: return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for pred in", "not None: y_true = y_true.reshape(-1) if 'log_loss' in measures: #", "the selected nodes to their sums. This fullfils the same", "M3d) + add_to_sum # apply activation function for active nodes", "an explanation of how propagation works, see :doc:`numpy_network`. \"\"\" #", "times ('abs', lambda x: np.abs(x)), ('cos', lambda x: np.cos(np.pi*x)), ('sin", "self.n_out) y_pred = y_pred.reshape(y_raw.shape[0], -1) if y_true is not None:", "add_to_sum=0): \"\"\"Apply updates for active nodes (active nodes can't share", "/ np.expand_dims(e_x.sum(axis=-1), axis=axis)) return s def apply_act_function(available_funcs, selected_funcs, x=None): \"\"\"Apply", "same as maximally falsely predicted y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss'] =", "if 'mse_loss' in measures: return_values['mse_loss'] = np.array([ sklearn.metrics.mean_squared_error(y_true, raw) for", "- self.offset] return apply_act_function(self.enabled_act_functions, funcs, x) def calc_act(self, x, active_nodes,", "= np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :] = np.nan y_raw = act_vec[...,", "'kappa' in measures: return_values['kappa'] = np.array([ sklearn.metrics.cohen_kappa_score(y_true, pred) for pred", "softmax normalized in dim axis \"\"\" e_x = np.exp(x -", "we cant rely on the result valid = np.all(~np.isnan(act_vec), axis=-1)", "= act_vec[..., 
-self.n_out:] return self.measurements_from_output(y_raw, y_true, measures) def measurements_from_output(self, y_raw,", "i, func in enumerate(selected_funcs): assert func < len(available_funcs) result[..., i]", "act_vec[~valid, :] = np.nan y_raw = act_vec[..., -self.n_out:] return self.measurements_from_output(y_raw,", "Feed Forward Neural Network For an explanation of how propagation", "1 # bias # propagate signal through all layers for", "nan, we cant rely on the result valid = np.all(~np.isnan(act_vec),", "np.array([ sklearn.metrics.log_loss(y_true, prob, labels=np.arange(self.n_out)) for prob in y_prob ]) if", "\"\"\"Compute softmax values for each sets of scores in x.", "y_pred.reshape(y_raw.shape[0], -1) if y_true is not None: y_true = y_true.reshape(-1)", "-self.n_out:] return self.measurements_from_output(y_raw, y_true, measures) def measurements_from_output(self, y_raw, y_true, measures):", "names available_funcs[func][0] for func in selected_funcs ]) class Network(BaseFFNN): \"\"\"Numpy", "= self.weight_matrix[:addend_nodes, active_nodes - self.offset] # x3d: weights, samples, source", "source nodes # M3d: weights, source, target # multiply relevant", "softmax values for each sets of scores in x. Returns:", "enumerate(selected_funcs): assert func < len(available_funcs) result[..., i] = available_funcs[func][1](x[..., i])", "('sigmoid', lambda x: (np.tanh(x/2.0) + 1.0)/2.0), ('tanh', lambda x: np.tanh(x)),", "# unstable if applied multiple times ('abs', lambda x: np.abs(x)),", "func < len(available_funcs) result[..., i] = available_funcs[func][1](x[..., i]) return result", "-x), ('squared', lambda x: x**2), # unstable if applied multiple", "same function as the :class:`wann_genetic.individual.torch.ffn.MultiActivationModule`. 
\"\"\" if x is not", "lambda x: 1.0*(x>0.0)), ('identity', lambda x: x), ('inverse', lambda x:", "= self.calc_act(act_vec, active_nodes, weights) # if any node is nan,", "y_true, measures) def measurements_from_output(self, y_raw, y_true, measures): return_values = dict()", "prob, labels=np.arange(self.n_out)) for prob in y_prob ]) if 'mse_loss' in", "else: return np.array([ # return function names available_funcs[func][0] for func", "function names available_funcs[func][0] for func in selected_funcs ]) class Network(BaseFFNN):", "available_act_functions = [ ('relu', lambda x: np.maximum(0, x)), ('sigmoid', lambda", "= y_true.reshape(-1) if 'log_loss' in measures: # nan is same", "values for each sets of scores in x. Returns: softmax", "act_vec[..., -self.n_out:] return self.measurements_from_output(y_raw, y_true, measures) def measurements_from_output(self, y_raw, y_true,", "y_pred = np.argmax(y_raw, axis=-1) y_prob = softmax(y_raw, axis=-1) if 'probabilities'", "lambda x: np.maximum(0, x)), ('sigmoid', lambda x: (np.tanh(x/2.0) + 1.0)/2.0),", "len(available_funcs) result[..., i] = available_funcs[func][1](x[..., i]) return result else: return", "np.array([ # return function names available_funcs[func][0] for func in selected_funcs", "= y_pred y_raw = y_raw.reshape(y_raw.shape[0], -1, self.n_out) y_prob = y_prob.reshape(y_raw.shape[0],", "lambda x: np.tanh(x)), ('gaussian (standard)', lambda x: np.exp(-np.multiply(x, x) /", "x), ('inverse', lambda x: -x), ('squared', lambda x: x**2), #", "# bias # propagate signal through all layers for active_nodes", "is same as maximally falsely predicted y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss']", "edges). \"\"\" addend_nodes = active_nodes[0] M = self.weight_matrix[:addend_nodes, active_nodes -", "in y_pred ]) return return_values def activation_functions(self, nodes, x=None): funcs", "can't share edges). 
\"\"\" addend_nodes = active_nodes[0] M = self.weight_matrix[:addend_nodes,", "signal through all layers for active_nodes in self.layers(): act_vec[..., active_nodes]", ":] * base_weights[:, None, None] x3d = x[..., :addend_nodes] act_sums", "self.offset] # x3d: weights, samples, source nodes # M3d: weights,", "Returns: softmax - softmax normalized in dim axis \"\"\" e_x", "x**2), # unstable if applied multiple times ('abs', lambda x:", "self.n_out) y_prob = y_prob.reshape(y_raw.shape[0], -1, self.n_out) y_pred = y_pred.reshape(y_raw.shape[0], -1)", "]) class Network(BaseFFNN): \"\"\"Numpy implmentation of a Feed Forward Neural", "('relu', lambda x: np.maximum(0, x)), ('sigmoid', lambda x: (np.tanh(x/2.0) +", "falsely predicted y_prob[~np.isfinite(y_prob)] = 0 return_values['log_loss'] = np.array([ sklearn.metrics.log_loss(y_true, prob,", "return_values['accuracy'] = np.array([ sklearn.metrics.accuracy_score(y_true, pred) for pred in y_pred ])", "nan is same as maximally falsely predicted y_prob[~np.isfinite(y_prob)] = 0", "np.empty((weights.shape[0], x.shape[0], self.n_nodes), dtype=float) act_vec[..., :self.n_in] = x[...] act_vec[..., self.n_in]", "on the result valid = np.all(~np.isnan(act_vec), axis=-1) act_vec[~valid, :] =", "return_values = dict() if 'raw' in measures: return_values['raw'] = y_raw", "weights, source, target # multiply relevant weight matrix with base" ]
[ "expected_data == imported_data def assert_many_records_match( expected: Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(),", "from django.urls import reverse from freezegun import freeze_time from lxml", "def assert_many_records_match( expected: Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(), ): \"\"\" Asserts", "django.template.loader import render_to_string from django.urls import reverse from freezegun import", "e in expected] imported_data = [get_checkable_data(i, ignore=ignore) for i in", "True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED = True MEURSING_TABLES_IMPLEMENTED = False", "4), >>> ) { \"start_date_0\": 1, \"start_date_1\": 2, \"start_date_2\": 2021,", "get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # { # \"description\": \"My sample footnote text\",", "), \"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1), ), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3,", "common.util import TaricDateRange from common.util import get_accessor from common.util import", "return True return do_test return decorator def validity_period_post_data(start: date, end:", "{exception}\") def check_validator(validate, value, expected_valid): try: validate(value) except ValidationError: if", "BytesIO: xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj],", "data else form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"): # If the widget", "# }, # } \"\"\" checked_field_names = {f.name for f", "def wraps( api_client, taric_schema, approved_transaction, valid_user, *args, **kwargs, ): if", "BytesIO from itertools import count from typing import Any from", "any fields that have sub-fields and hence result in multiple", "return BytesIO(xml.encode()) def 
taric_xml_record_codes(xml): \"\"\"Yields tuples of (record_code, subrecord_code)\"\"\" records", "data in the expected models, and that the count of", "requires_meursing_tables = pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not implemented\", )", "in records] def validate_taric_xml( factory=None, instance=None, factory_kwargs=None, check_order=True, ): def", "parse as parse_date from dateutil.relativedelta import relativedelta from django import", "from freezegun import freeze_time from lxml import etree from common.models.records", ") @contextlib.contextmanager def raises_if(exception, expected): try: yield except exception: if", "relativedelta(months=+1), ) @classmethod def no_end_before(cls, dt): return TaricDateRange( dt +", "[tuple(codes(record)) for record in records] def validate_taric_xml( factory=None, instance=None, factory_kwargs=None,", "if expected: pytest.fail(f\"Did not raise {exception}\") def check_validator(validate, value, expected_valid):", "for every field in the imported models is the same", "f\"XML errors: {taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml) kwargs = {\"xml\": xml,", "(relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(),", "that the form will put onto a page and their", "expected_data = get_checkable_data(expected, ignore=ignore) imported_data = get_checkable_data(imported, ignore=ignore) assert expected_data", "So we need to generate one # form item per", "\"FN\" # \"footnote_id\": \"123\", # }, # } \"\"\" checked_field_names", "date in ((\"start_date\", start), (\"end_date\", end)) for i, part in", "dt + relativedelta(months=-1), dt + relativedelta(months=+1), ) @classmethod def no_end_before(cls,", "): \"\"\" Asserts that every value for every field in", "value and has some internal structure. 
So we need to", "overriding identifying_fields if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) return", "**dict(get_field_tuple(existing, field) for field in identifying_fields) ) def make_non_duplicate_record(factory, identifying_fields=None):", "get_checkable_data(imported, ignore=ignore) assert expected_data == imported_data def assert_many_records_match( expected: Sequence[TrackedModel],", "need to be provided - not both.\", ) current_instance =", "import count from typing import Any from typing import Dict", "= {f.name for f in model.copyable_fields} - ignore data =", "@contextlib.contextmanager def raises_if(exception, expected): try: yield except exception: if not", "{\"format\": \"xml\"}, ) assert response.status_code == 200 content = response.content", "and fields with names passed to `ignore`. The returned data", "the identifying fields for any linked models rather than internal", "record created last.\"\"\" existing = factory.create() not_duplicate = factory.create() if", "\"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"}, ) assert response.status_code ==", "+ relativedelta(days=+14), dt + relativedelta(months=+1), ) @classmethod def short_overlap(cls, dt):", "= get_checkable_data(imported, ignore=ignore) assert expected_data == imported_data def assert_many_records_match( expected:", "2021, \"end_date_0\": 4, \"end_date_1\": 3, \"end_date_2\": 2022, } \"\"\" return", "# etc. This mirrors the MultiValueWidget in django/forms/widgets.py. if field", "implemented\", ) requires_meursing_tables = pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not", "self.now + end return TaricDateRange(start, end) raise AttributeError(name) @classmethod def", "short_overlap(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(months=+1),", "some internal structure. 
So we need to generate one #", "relativedelta(years=+2, months=+2), ), \"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1), ),", "functools import wraps from io import BytesIO from itertools import", "current values, taking account of any fields that have sub-fields", "taric_xml_record_codes(xml): \"\"\"Yields tuples of (record_code, subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name() =", "existing = factory.create() # allow overriding identifying_fields if identifying_fields is", "TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-1), ) @classmethod def", "passed factory that are not duplicates of each other and", "datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0) def __getattr__(self, name): if", "None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)), } @property def now(self): return self.datetime_now.date()", "hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields) return data def assert_records_match( expected: TrackedModel,", "} data.update(identifying_fields) return data def assert_records_match( expected: TrackedModel, imported: TrackedModel,", "import etree from common.models.records import TrackedModel from common.renderers import counter_generator", "200 content = response.content xml = etree.XML(content) taric_schema.validate(xml) assert not", "datetime from datetime import timezone from functools import wraps from", "or an object instance need to be provided\", ) if", "fail. 
\"\"\" cutoff = parse_date(cutoff) def decorator(fn): @wraps(fn) def do_test(*args,", "import reverse from freezegun import freeze_time from lxml import etree", "every field in the imported model is the same as", "(relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)), } @property", "name, date in ((\"start_date\", start), (\"end_date\", end)) for i, part", ") if factory and instance: raise AssertionError( \"Either a factory", "= api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"}, )", "last.\"\"\" existing = factory.create() # allow overriding identifying_fields if identifying_fields", "dt): return TaricDateRange( dt + relativedelta(months=-1), None, ) def only_applicable_after(cutoff):", "ignore=ignore) assert expected_data == imported_data def assert_many_records_match( expected: Sequence[TrackedModel], imported:", "count(start=1) def generate_test_import_xml(obj: dict) -> BytesIO: xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\",", "pass except Exception: raise else: pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\") return", "dict representing the model's data ignoring any automatically set fields", "text\", # \"described_footnote\": { # \"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\": \"123\",", "be implemented\", ) @contextlib.contextmanager def raises_if(exception, expected): try: yield except", "for record in records] def validate_taric_xml( factory=None, instance=None, factory_kwargs=None, check_order=True,", "to generate one # form item per decompressed value and", "append the name with _0, _1, # etc. 
This mirrors", "data[name].get_identifying_fields() for name in checked_field_names if hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields)", "= response.content xml = etree.XML(content) taric_schema.validate(xml) assert not taric_schema.error_log, f\"XML", "relativedelta(days=+14)), } @property def now(self): return self.datetime_now.date() @property def datetime_now(self):", "short_after(cls, dt): return TaricDateRange( dt + relativedelta(days=+14), dt + relativedelta(months=+1),", "validity_period_post_data(start: date, end: date) -> Dict[str, int]: \"\"\" Construct a", "\"123\", # }, # } \"\"\" checked_field_names = {f.name for", "models rather than internal PKs. For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) #", "checked_field_names if hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields) return data def assert_records_match(", "or {} ) api_client.force_login(user=valid_user) response = api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\":", "relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)),", "# \"described_footnote\": { # \"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\": \"123\", #", "are duplicates of each other and returns the record created", "assert_records_match( expected: TrackedModel, imported: TrackedModel, ignore=frozenset(), ): \"\"\" Asserts that", "will also not be checked. 
\"\"\" expected_data = [get_checkable_data(e, ignore=ignore)", ") return BytesIO(xml.encode()) def taric_xml_record_codes(xml): \"\"\"Yields tuples of (record_code, subrecord_code)\"\"\"", "multiple HTML <input> objects.\"\"\" data = {**form.initial} for field in", "a dictionary of the fields that the form will put", "TrackedModel, imported: TrackedModel, ignore=frozenset(), ): \"\"\" Asserts that every value", "relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\": ( relativedelta(years=+1, months=+1, days=+1),", "try: fn(*args, **kwargs) except pytest.fail.Exception: pass except Exception: raise else:", "returns the record created last.\"\"\" existing = factory.create() not_duplicate =", "the imported models is the same as the data in", "return decorator def validity_period_post_data(start: date, end: date) -> Dict[str, int]:", "_0, _1, # etc. This mirrors the MultiValueWidget in django/forms/widgets.py.", "def decorator(func): def wraps( api_client, taric_schema, approved_transaction, valid_user, *args, **kwargs,", "days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1),", "forms.ModelForm) -> Dict[str, Any]: \"\"\"Returns a dictionary of the fields", "counter_generator from common.serializers import validate_taric_xml_record_order from common.util import TaricDateRange from", "- ignore data = { name: getattr(model, get_accessor(model._meta.get_field(name))) for name", "factory that are not duplicates of each other and returns", "end = self.now + end return TaricDateRange(start, end) raise AttributeError(name)", "the name with _0, _1, # etc. 
This mirrors the", "ignoring any automatically set fields and fields with names passed", "reason=\"Commodities not implemented\", ) requires_export_refund_nomenclature = pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export", "PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop not implemented\", ) requires_interdependent_import = pytest.mark.skipif(", "context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\":", "test should fail. \"\"\" cutoff = parse_date(cutoff) def decorator(fn): @wraps(fn)", "if not factory and not instance: raise AssertionError( \"Either a", "days=+1), relativedelta(years=+2, months=+2), ), \"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1),", "start if end is not None: end = self.now +", "need to be provided\", ) if factory and instance: raise", "\"\"\"Yields tuples of (record_code, subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name() = 'record']\")", "taking account of any fields that have sub-fields and hence", "return { f\"{name}_{i}\": part for name, date in ((\"start_date\", start),", "= render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter),", "start), (\"end_date\", end)) for i, part in enumerate([date.day, date.month, date.year])", "factory.create( **dict(get_field_tuple(existing, field) for field in identifying_fields) ) def make_non_duplicate_record(factory,", "not_duplicate = factory.create() if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields)", "!= get_field_tuple(not_duplicate, f) for f in identifying_fields ) return not_duplicate", "a simple # value and has some internal structure. 
So", "relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2), ), \"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1,", "import Dict from typing import Sequence import pytest from dateutil.parser", "# form item per decompressed value and append the name", "field) for field in identifying_fields) ) def make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates", "\"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1), ), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)),", "@classmethod def short_overlap(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt", "linked models rather than internal PKs. For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"})", "freezegun import freeze_time from lxml import etree from common.models.records import", "): if not factory and not instance: raise AssertionError( \"Either", "be provided - not both.\", ) current_instance = instance or", "ignore={\"sid\"}) # { # \"description\": \"My sample footnote text\", #", "both.\", ) current_instance = instance or factory.create( transaction=approved_transaction, **factory_kwargs or", "(relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1), relativedelta(months=+2)), \"adjacent_no_end\":", "except Exception: raise else: pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\") return True", "of any fields that have sub-fields and hence result in", "relativedelta(years=-2, months=-2), relativedelta(years=-2), ), \"adjacent_later_big\": ( relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2),", "ignore=frozenset(), ): \"\"\" Asserts that every value for every field", "typing import Dict from typing import Sequence import pytest from", "def check_validator(validate, value, expected_valid): try: 
validate(value) except ValidationError: if expected_valid:", "using the passed factory that are not duplicates of each", "**kwargs) except pytest.fail.Exception: pass except Exception: raise else: pytest.fail(f\"Rule applied", "HTML <input> objects.\"\"\" data = {**form.initial} for field in form.rendered_fields:", "implemented\", ) requires_interdependent_import = pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not", "data = {**form.initial} for field in form.rendered_fields: value = data[field]", "as parse_date from dateutil.relativedelta import relativedelta from django import forms", ") def make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the passed", "assert expected_data == imported_data def assert_many_records_match( expected: Sequence[TrackedModel], imported: Sequence[TrackedModel],", "the same as the data in the expected model. System", "from common.serializers import validate_taric_xml_record_order from common.util import TaricDateRange from common.util", "= [get_checkable_data(i, ignore=ignore) for i in imported] assert expected_data ==", "records using the passed factory that are duplicates of each", "dictionary of the fields that the form will put onto", "if not expected_valid: pytest.fail(f'Expected validation error for value \"{value}\"') def", "\"\"\" Returns a dict representing the model's data ignoring any", "names passed to `ignore`. The returned data will contain the", "objects, eg: >>> validity_period_post_data( >>> datetime.date(2021, 1, 2), >>> datetime.date(2022,", "models, and that the count of both is equal. System", "in enumerate(value) if v is not None} ) elif value", "to `ignore`. 
The returned data will contain the identifying fields", ">>> datetime.date(2022, 3, 4), >>> ) { \"start_date_0\": 1, \"start_date_1\":", "allow overriding identifying_fields if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields)", "for every field in the imported model is the same", "else: if expected: pytest.fail(f\"Did not raise {exception}\") def check_validator(validate, value,", "relativedelta(days=-1), ) @classmethod def short_after(cls, dt): return TaricDateRange( dt +", "months=-2), relativedelta(years=-2), ), \"adjacent_later_big\": ( relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2), ),", "the count of both is equal. System fields that will", "{ name: getattr(model, get_accessor(model._meta.get_field(name))) for name in checked_field_names } identifying_fields", "will put onto a page and their current values, taking", "( relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2), ), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\":", "is not just a simple # value and has some", "get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\" Returns a dict representing the model's", "the imported model is the same as the data in", "part in enumerate([date.day, date.month, date.year]) } def get_form_data(form: forms.ModelForm) ->", "return not_duplicate def get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\" Returns a dict", "pytest.fail(f'Expected validation error for value \"{value}\"') def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates", "if check_order: validate_taric_xml_record_order(xml) kwargs = {\"xml\": xml, **kwargs} func( *args,", "start and end dates of a ValidityPeriodForm from the given", "**kwargs} func( *args, **kwargs, ) return wraps return decorator class", "last.\"\"\" existing = factory.create() not_duplicate = factory.create() if identifying_fields is", "dt + 
relativedelta(months=-1), None, ) def only_applicable_after(cutoff): \"\"\" Decorator which", "date, end: date) -> Dict[str, int]: \"\"\" Construct a POST", "onto a page and their current values, taking account of", "BytesIO(xml.encode()) def taric_xml_record_codes(xml): \"\"\"Yields tuples of (record_code, subrecord_code)\"\"\" records =", "\"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)),", "simple # value and has some internal structure. So we", "a factory or an object instance need to be provided", "approved_transaction, valid_user, *args, **kwargs, ): if not factory and not", "now(self): return self.datetime_now.date() @property def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0,", "make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the passed factory that", "False UTC = timezone.utc requires_commodities = pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities", "django/forms/widgets.py. 
if field in data: del data[field] value = form.fields[field].widget.decompress(value)", ") api_client.force_login(user=valid_user) response = api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ),", "((\"start_date\", start), (\"end_date\", end)) for i, part in enumerate([date.day, date.month,", "TaricDateRange( dt + relativedelta(months=-1), None, ) def only_applicable_after(cutoff): \"\"\" Decorator", "not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not implemented\", ) requires_partial_temporary_stop = pytest.mark.skipif(", "passed factory that are duplicates of each other and returns", "relativedelta(days=+14, months=+1, years=+1), ), \"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14), relativedelta(days=+14), ),", "for i, part in enumerate([date.day, date.month, date.year]) } def get_form_data(form:", "+ start if end is not None: end = self.now", "implemented\", ) @contextlib.contextmanager def raises_if(exception, expected): try: yield except exception:", "fields with names passed to `ignore`. 
The returned data will", "data[field] value = form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v for i, v", "\"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\": ( relativedelta(years=+1,", "content = response.content xml = etree.XML(content) taric_schema.validate(xml) assert not taric_schema.error_log,", "value for every field in the imported model is the", "If the widget can be decompressed, then it is not", "relativedelta from django import forms from django.core.exceptions import ValidationError from", "\"adjacent_later_big\": ( relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2), ), \"overlap_normal\": ( relativedelta(days=+15),", "from common.util import TaricDateRange from common.util import get_accessor from common.util", "expected): try: yield except exception: if not expected: raise else:", "*args, **kwargs, ): if not factory and not instance: raise", ") { \"start_date_0\": 1, \"start_date_1\": 2, \"start_date_2\": 2021, \"end_date_0\": 4,", "common.models.records import TrackedModel from common.renderers import counter_generator from common.serializers import", "implemented\", ) requires_update_importer = pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating importers", "the widget can be decompressed, then it is not just", "if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing,", "in model.copyable_fields} - ignore data = { name: getattr(model, get_accessor(model._meta.get_field(name)))", "\"\"\" Decorator which asserts that a test fails after a", "typing import Sequence import pytest from dateutil.parser import parse as", "hasattr(form.fields[field].widget, \"decompress\"): # If the widget can be decompressed, then", "[get_checkable_data(i, ignore=ignore) for i in imported] assert expected_data 
== imported_data", "i in imported] assert expected_data == imported_data _transaction_counter = count(start=1)", "def raises_if(exception, expected): try: yield except exception: if not expected:", "automatically set fields and fields with names passed to `ignore`.", "months=+2), ), \"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\":", "import BytesIO from itertools import count from typing import Any", "dt + relativedelta(days=-14), ) @classmethod def medium_before(cls, dt): return TaricDateRange(", "in form.rendered_fields: value = data[field] if field in data else", "= True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED = True MEURSING_TABLES_IMPLEMENTED =", "decorator(func): def wraps( api_client, taric_schema, approved_transaction, valid_user, *args, **kwargs, ):", "= pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating importers to be implemented\",", ") requires_meursing_tables = pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not implemented\",", "pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not implemented\", ) requires_partial_temporary_stop =", "valid_user, *args, **kwargs, ): if not factory and not instance:", "requires_interdependent_import = pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not implemented\", )", "\"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\": counter_generator, }, )", "the data in the expected models, and that the count", "days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1), ), \"overlap_big\":", "expected: raise else: if expected: pytest.fail(f\"Did not raise {exception}\") def", 
"only_applicable_after(cutoff): \"\"\" Decorator which asserts that a test fails after", "\"Either a factory or an object instance need to be", "from django.template.loader import render_to_string from django.urls import reverse from freezegun", "will change from model to model are not checked. Any", "it is not just a simple # value and has", "value and append the name with _0, _1, # etc.", "# allow overriding identifying_fields if identifying_fields is None: identifying_fields =", "= { name: data[name].get_identifying_fields() for name in checked_field_names if hasattr(data[name],", "just a simple # value and has some internal structure.", "no_end_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), None, ) def", "that will change from model to model are not checked.", ") requires_partial_temporary_stop = pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop not", "= list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing, field) for field in identifying_fields)", "the passed factory that are not duplicates of each other", "every value for every field in the imported models is", "count from typing import Any from typing import Dict from", "( relativedelta(years=-2, months=-2), relativedelta(years=-2), ), \"adjacent_later_big\": ( relativedelta(months=+1, days=+1), relativedelta(years=+2,", "of the fields that the form will put onto a", "as the data in the expected model. 
System fields that", "next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\": counter_generator, },", "None: end = self.now + end return TaricDateRange(start, end) raise", "a POST data fragment for the validity period start and", "@property def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0) def __getattr__(self,", "with freeze_time(cutoff + relativedelta(days=-1)): try: fn(*args, **kwargs) except pytest.fail.Exception: pass", "from common.util import get_accessor from common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED =", "PKs. For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # { # \"description\": \"My", "+ relativedelta(months=-1), dt + relativedelta(days=-14), ) @classmethod def medium_before(cls, dt):", "factory or an object instance need to be provided\", )", "relativedelta(days=+15), relativedelta(days=+14, months=+1), ), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\": (", "factory=None, instance=None, factory_kwargs=None, check_order=True, ): def decorator(func): def wraps( api_client,", "etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", ) return [tuple(codes(record)) for record in", "\"\"\" expected_data = [get_checkable_data(e, ignore=ignore) for e in expected] imported_data", "validation error for value \"{value}\"') except Exception: raise else: if", "factory.create() if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) assert any(", "checked_field_names = {f.name for f in model.copyable_fields} - ignore data", "def taric_xml_record_codes(xml): \"\"\"Yields tuples of (record_code, subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name()", "EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED = 
True MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED", "factory and instance: raise AssertionError( \"Either a factory or an", "stop not implemented\", ) requires_interdependent_import = pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent", "@classmethod def short_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt", "pytest.fail.Exception: pass except Exception: raise else: pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\")", "import TaricDateRange from common.util import get_accessor from common.util import get_field_tuple", "TaricDateRange(start, end) raise AttributeError(name) @classmethod def short_before(cls, dt): return TaricDateRange(", "identifying fields for any linked models rather than internal PKs.", "# If the widget can be decompressed, then it is", "to model are not checked. Any field names given to", "datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0) def __getattr__(self, name): if name in", "data.update( **{f\"{field}_{i}\": v for i, v in enumerate(value) if v", "getattr(model, get_accessor(model._meta.get_field(name))) for name in checked_field_names } identifying_fields = {", "import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED =", "requires_update_importer = pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating importers to be", "start, end = self.deltas[name] start = self.now + start if", "\"\"\"Returns a dictionary of the fields that the form will", "object instance need to be provided - not both.\", )", "\"\"\" Construct a POST data fragment for the validity period", "from common.renderers import counter_generator from common.serializers import validate_taric_xml_record_order from common.util", "data[field] if field in data else form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"):", ") @classmethod def 
medium_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1),", "{taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml) kwargs = {\"xml\": xml, **kwargs} func(", "should fail before cutoff with freeze_time(cutoff + relativedelta(days=-1)): try: fn(*args,", "decorator def validity_period_post_data(start: date, end: date) -> Dict[str, int]: \"\"\"", "(relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\": ( relativedelta(years=+1, months=+1,", "is the same as the data in the expected model.", "import relativedelta from django import forms from django.core.exceptions import ValidationError", "\"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\":", "expected] imported_data = [get_checkable_data(i, ignore=ignore) for i in imported] assert", "from functools import wraps from io import BytesIO from itertools", "also not be checked. \"\"\" expected_data = get_checkable_data(expected, ignore=ignore) imported_data", "that a test fails after a specified cutoff date. 
:param", "def get_form_data(form: forms.ModelForm) -> Dict[str, Any]: \"\"\"Returns a dictionary of", "wraps( api_client, taric_schema, approved_transaction, valid_user, *args, **kwargs, ): if not", "value = form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v for i, v in", "return decorator class Dates: deltas = { \"normal\": (relativedelta(), relativedelta(months=+1)),", "check_order: validate_taric_xml_record_order(xml) kwargs = {\"xml\": xml, **kwargs} func( *args, **kwargs,", "months=+1)), \"later\": ( relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1, months=+2), ), \"big\":", "= pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not implemented\", ) requires_update_importer", "as the data in the expected models, and that the", "pytest.fail(f\"Did not raise {exception}\") def check_validator(validate, value, expected_valid): try: validate(value)", "relativedelta(months=-1), None, ) def only_applicable_after(cutoff): \"\"\" Decorator which asserts that", "decompressed, then it is not just a simple # value", "\"\"\" Asserts that every value for every field in the", "import timezone from functools import wraps from io import BytesIO", "response.status_code == 200 content = response.content xml = etree.XML(content) taric_schema.validate(xml)", "dt): return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-1), )", "relativedelta(months=-1), dt + relativedelta(days=-1), ) @classmethod def short_after(cls, dt): return", "date.year]) } def get_form_data(form: forms.ModelForm) -> Dict[str, Any]: \"\"\"Returns a", "datetime import datetime from datetime import timezone from functools import", "MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC = timezone.utc requires_commodities", "common.util import get_accessor from common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True", "not UPDATE_IMPORTER_IMPLEMENTED, 
reason=\"Requires Updating importers to be implemented\", ) @contextlib.contextmanager", "or an object instance need to be provided - not", "Decorator which asserts that a test fails after a specified", "each other and returns the record created last.\"\"\" existing =", "\"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\": \"123\", # }, # } \"\"\"", "'record']\") codes = etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", ) return [tuple(codes(record))", "relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2), ), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(),", "import ValidationError from django.template.loader import render_to_string from django.urls import reverse", "\"My sample footnote text\", # \"described_footnote\": { # \"footnote_type__footnote_type_id\": \"FN\"", "relativedelta(years=+3, months=+2), ), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\":", "= data[field] if field in data else form.fields[field].initial if hasattr(form.fields[field].widget,", "the form will put onto a page and their current", "**kwargs, ) return wraps return decorator class Dates: deltas =", "`ignore` will also not be checked. \"\"\" expected_data = [get_checkable_data(e,", "Exception: raise else: pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\") return True return", "with names passed to `ignore`. 
The returned data will contain", "), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\": ( relativedelta(years=+3, months=+1), relativedelta(years=+3,", "from django.core.exceptions import ValidationError from django.template.loader import render_to_string from django.urls", "implemented\", ) requires_export_refund_nomenclature = pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature", "record in records] def validate_taric_xml( factory=None, instance=None, factory_kwargs=None, check_order=True, ):", "\"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2), relativedelta(years=-2), ), \"adjacent_later_big\": ( relativedelta(months=+1, days=+1),", "in django/forms/widgets.py. if field in data: del data[field] value =", "value \"{value}\"') except Exception: raise else: if not expected_valid: pytest.fail(f'Expected", "not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop not implemented\", ) requires_interdependent_import =", "months=+1, days=+1), relativedelta(years=+1, months=+2), ), \"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\":", "end dates of a ValidityPeriodForm from the given date objects,", "relativedelta(days=+14), ), \"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1), ), \"overlap_big\": (relativedelta(years=+1),", "in multiple HTML <input> objects.\"\"\" data = {**form.initial} for field", "value \"{value}\"') def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the", "{ name: data[name].get_identifying_fields() for name in checked_field_names if hasattr(data[name], \"get_identifying_fields\")", "cutoff = parse_date(cutoff) def decorator(fn): @wraps(fn) def do_test(*args, **kwargs): #", "field in form.rendered_fields: value = data[field] if field in data", "can be decompressed, then it is not just a simple", "temporary stop not 
implemented\", ) requires_interdependent_import = pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED,", "+ relativedelta(months=-1), None, ) def only_applicable_after(cutoff): \"\"\" Decorator which asserts", "be decompressed, then it is not just a simple #", "end = self.deltas[name] start = self.now + start if end", "from typing import Any from typing import Dict from typing", "is None: identifying_fields = list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing, f) !=", "kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"}, ) assert response.status_code == 200", "@classmethod def medium_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt", "MultiValueWidget in django/forms/widgets.py. if field in data: del data[field] value", "count of both is equal. System fields that will change", "(relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\": ( relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1, months=+2),", "days=+2)), \"after_big\": ( relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2), ), \"backwards\": (relativedelta(months=+1),", "1, \"start_date_1\": 2, \"start_date_2\": 2021, \"end_date_0\": 4, \"end_date_1\": 3, \"end_date_2\":", "and hence result in multiple HTML <input> objects.\"\"\" data =", "validate_taric_xml( factory=None, instance=None, factory_kwargs=None, check_order=True, ): def decorator(func): def wraps(", "not expected: raise else: if expected: pytest.fail(f\"Did not raise {exception}\")", "we need to generate one # form item per decompressed", "not just a simple # value and has some internal", "), \"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2), relativedelta(years=-2), ), \"adjacent_later_big\": ( relativedelta(months=+1,", "def __getattr__(self, name): if name in self.deltas: start, end =", "models is the same as the data in the expected", "yield except exception: if not expected: raise else: if 
expected:", "and their current values, taking account of any fields that", "any automatically set fields and fields with names passed to", "end: date) -> Dict[str, int]: \"\"\" Construct a POST data", "that are not duplicates of each other and returns the", "in the imported model is the same as the data", "= True MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC =", "for value \"{value}\"') except Exception: raise else: if not expected_valid:", "ignore=ignore) imported_data = get_checkable_data(imported, ignore=ignore) assert expected_data == imported_data def", "change from model to model are not checked. Any field", "factory that are duplicates of each other and returns the", "per decompressed value and append the name with _0, _1,", "any linked models rather than internal PKs. For example: get_checkable_data(FootnoteDescriptionFactory(),", "**kwargs, ): if not factory and not instance: raise AssertionError(", "if field in data: del data[field] value = form.fields[field].widget.decompress(value) data.update(", "**kwargs): # test should pass normally fn(*args, **kwargs) # test", "relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1, months=+2), ), \"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)),", "pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop not implemented\", ) requires_interdependent_import", "before {cutoff:%Y-%m-%d}\") return True return do_test return decorator def validity_period_post_data(start:", "data fragment for the validity period start and end dates", "dates of a ValidityPeriodForm from the given date objects, eg:", "from io import BytesIO from itertools import count from typing", "duplicates of each other and returns the record created last.\"\"\"", "pass normally fn(*args, **kwargs) # test should fail before cutoff", "if not expected: raise else: if expected: pytest.fail(f\"Did not raise", "COMMODITIES_IMPLEMENTED = True 
MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC", "def now(self): return self.datetime_now.date() @property def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0,", "relativedelta(months=-1), dt + relativedelta(days=-14), ) @classmethod def medium_before(cls, dt): return", "freeze_time(cutoff + relativedelta(days=-1)): try: fn(*args, **kwargs) except pytest.fail.Exception: pass except", "INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not implemented\", ) requires_update_importer = pytest.mark.skipif( not", "not implemented\", ) requires_partial_temporary_stop = pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary", "(relativedelta(months=+1, days=+1), relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1), None), \"adjacent_even_later\": ( relativedelta(months=+2,", "checked. Any field names given to `ignore` will also not", "list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing, field) for field in identifying_fields) )", "imported_data = [get_checkable_data(i, ignore=ignore) for i in imported] assert expected_data", "(relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10),", "value for every field in the imported models is the", "return factory.create( **dict(get_field_tuple(existing, field) for field in identifying_fields) ) def", "not instance: raise AssertionError( \"Either a factory or an object", "taric_schema.validate(xml) assert not taric_schema.error_log, f\"XML errors: {taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml)", "), \"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1), ), \"overlap_normal_earlier\": (", "dt): return TaricDateRange( 
dt + relativedelta(months=-1), dt + relativedelta(months=+1), )", "relativedelta(months=-1), dt + relativedelta(months=+1), ) @classmethod def no_end_before(cls, dt): return", "second=0, microsecond=0) def __getattr__(self, name): if name in self.deltas: start,", "\"adjacent_later\": (relativedelta(months=+1, days=+1), relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1), None), \"adjacent_even_later\": (", "True return do_test return decorator def validity_period_post_data(start: date, end: date)", "wraps return decorator class Dates: deltas = { \"normal\": (relativedelta(),", "= self.deltas[name] start = self.now + start if end is", "enumerate([date.day, date.month, date.year]) } def get_form_data(form: forms.ModelForm) -> Dict[str, Any]:", "dt): return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-14), )", "created last.\"\"\" existing = factory.create() # allow overriding identifying_fields if", "check_order=True, ): def decorator(func): def wraps( api_client, taric_schema, approved_transaction, valid_user,", "= {\"xml\": xml, **kwargs} func( *args, **kwargs, ) return wraps", "response = api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"},", "expected: TrackedModel, imported: TrackedModel, ignore=frozenset(), ): \"\"\" Asserts that every", "# \"description\": \"My sample footnote text\", # \"described_footnote\": { #", "the same as the data in the expected models, and", "date) -> Dict[str, int]: \"\"\" Construct a POST data fragment", "import pytest from dateutil.parser import parse as parse_date from dateutil.relativedelta", "short_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-14),", "f in identifying_fields ) return not_duplicate def get_checkable_data(model: TrackedModel, ignore=frozenset()):", "( relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1), ), 
\"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14),", "return do_test return decorator def validity_period_post_data(start: date, end: date) ->", "relativedelta(days=-1)): try: fn(*args, **kwargs) except pytest.fail.Exception: pass except Exception: raise", "datetime import timezone from functools import wraps from io import", "{**form.initial} for field in form.rendered_fields: value = data[field] if field", "# value and has some internal structure. So we need", "name with _0, _1, # etc. This mirrors the MultiValueWidget", "names given to `ignore` will also not be checked. \"\"\"", "None: identifying_fields = list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing, field) for field", "True UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED = True", "xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\":", "A date string, or datetime object before which the test", "ignore=ignore) for e in expected] imported_data = [get_checkable_data(i, ignore=ignore) for", "def validate_taric_xml( factory=None, instance=None, factory_kwargs=None, check_order=True, ): def decorator(func): def", "try: yield except exception: if not expected: raise else: if", "tuples of (record_code, subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name() = 'record']\") codes", "_1, # etc. This mirrors the MultiValueWidget in django/forms/widgets.py. 
if", "instance: raise AssertionError( \"Either a factory or an object instance", "COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\", ) requires_export_refund_nomenclature = pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED,", "INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED", "relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None),", "reverse from freezegun import freeze_time from lxml import etree from", "AttributeError(name) @classmethod def short_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1),", "a ValidityPeriodForm from the given date objects, eg: >>> validity_period_post_data(", "is not None} ) elif value is not None: data.setdefault(field,", "that every value for every field in the imported models", "given to `ignore` will also not be checked. \"\"\" expected_data", "days=+1), relativedelta(years=+1, months=+2), ), \"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1),", "normally fn(*args, **kwargs) # test should fail before cutoff with", ">>> datetime.date(2021, 1, 2), >>> datetime.date(2022, 3, 4), >>> )", "imported: TrackedModel, ignore=frozenset(), ): \"\"\" Asserts that every value for", "checked. 
\"\"\" expected_data = [get_checkable_data(e, ignore=ignore) for e in expected]", "return data def assert_records_match( expected: TrackedModel, imported: TrackedModel, ignore=frozenset(), ):", ") return [tuple(codes(record)) for record in records] def validate_taric_xml( factory=None,", "\"adjacent_even_later\": ( relativedelta(months=+2, days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2),", "field names given to `ignore` will also not be checked.", "Any field names given to `ignore` will also not be", "value = data[field] if field in data else form.fields[field].initial if", "\"get_identifying_fields\") } data.update(identifying_fields) return data def assert_records_match( expected: TrackedModel, imported:", "relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1), relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1,", "field in data: del data[field] value = form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\":", "= get_checkable_data(expected, ignore=ignore) imported_data = get_checkable_data(imported, ignore=ignore) assert expected_data ==", "the validity period start and end dates of a ValidityPeriodForm", "-> BytesIO: xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\":", "-> Dict[str, int]: \"\"\" Construct a POST data fragment for", "provided - not both.\", ) current_instance = instance or factory.create(", "form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v for i, v in enumerate(value) if", "relativedelta(years=+3, days=+2)), \"after_big\": ( relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2), ), \"backwards\":", "xml = etree.XML(content) taric_schema.validate(xml) assert not taric_schema.error_log, f\"XML 
errors: {taric_schema.error_log}\"", "instance need to be provided - not both.\", ) current_instance", "etc. This mirrors the MultiValueWidget in django/forms/widgets.py. if field in", "get_form_data(form: forms.ModelForm) -> Dict[str, Any]: \"\"\"Returns a dictionary of the", "2022, } \"\"\" return { f\"{name}_{i}\": part for name, date", "the data in the expected model. System fields that will", "account of any fields that have sub-fields and hence result", "+ relativedelta(months=+1), ) @classmethod def no_end_before(cls, dt): return TaricDateRange( dt", "factory.create() # allow overriding identifying_fields if identifying_fields is None: identifying_fields", "identifying_fields = list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing, f) != get_field_tuple(not_duplicate, f)", "in data: del data[field] value = form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v", "data: del data[field] value = form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v for", "validate_taric_xml_record_order from common.util import TaricDateRange from common.util import get_accessor from", "\"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)), }", "( relativedelta(days=+15), relativedelta(days=+14, months=+1), ), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\":", "\"xml\"}, ) assert response.status_code == 200 content = response.content xml", "generate one # form item per decompressed value and append", "), \"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1),", "for field in identifying_fields) ) def make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two", "dt + relativedelta(months=-1), dt + 
relativedelta(days=-14), ) @classmethod def medium_before(cls,", "are not checked. Any field names given to `ignore` will", "== 200 content = response.content xml = etree.XML(content) taric_schema.validate(xml) assert", "def get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\" Returns a dict representing the", "from datetime import timezone from functools import wraps from io", "in data else form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"): # If the", "factory.create( transaction=approved_transaction, **factory_kwargs or {} ) api_client.force_login(user=valid_user) response = api_client.get(", "refund nomenclature not implemented\", ) requires_meursing_tables = pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED,", "freeze_time from lxml import etree from common.models.records import TrackedModel from", "for name in checked_field_names } identifying_fields = { name: data[name].get_identifying_fields()", "parse_date from dateutil.relativedelta import relativedelta from django import forms from", "check_validator(validate, value, expected_valid): try: validate(value) except ValidationError: if expected_valid: pytest.fail(f'Unexpected", "field in the imported models is the same as the", "date string, or datetime object before which the test should", "imported_data = get_checkable_data(imported, ignore=ignore) assert expected_data == imported_data def assert_many_records_match(", "dt): return TaricDateRange( dt + relativedelta(days=+14), dt + relativedelta(months=+1), )", "import forms from django.core.exceptions import ValidationError from django.template.loader import render_to_string", "days=+1), None), \"adjacent_even_later\": ( relativedelta(months=+2, days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\": (", "list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing, f) != get_field_tuple(not_duplicate, f) for f", "-> Dict[str, Any]: \"\"\"Returns a dictionary of the fields that", 
"sub-fields and hence result in multiple HTML <input> objects.\"\"\" data", "records using the passed factory that are not duplicates of", "imported_data _transaction_counter = count(start=1) def generate_test_import_xml(obj: dict) -> BytesIO: xml", "an object instance need to be provided - not both.\",", "\"no_end\": (relativedelta(), None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)), } @property def now(self):", "@classmethod def no_end_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), None,", "v for i, v in enumerate(value) if v is not", "relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1), relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1), None), \"adjacent_even_later\":", "(relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\":", "footnote text\", # \"described_footnote\": { # \"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\":", "validity period start and end dates of a ValidityPeriodForm from", "3, \"end_date_2\": 2022, } \"\"\" return { f\"{name}_{i}\": part for", "internal PKs. 
For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # { # \"description\":", "in identifying_fields ) return not_duplicate def get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\"", "\"\"\"Creates two records using the passed factory that are not", "\"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\":", "date from datetime import datetime from datetime import timezone from", "name in checked_field_names if hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields) return data", "item per decompressed value and append the name with _0,", "import validate_taric_xml_record_order from common.util import TaricDateRange from common.util import get_accessor", "part for name, date in ((\"start_date\", start), (\"end_date\", end)) for", "or factory.create( transaction=approved_transaction, **factory_kwargs or {} ) api_client.force_login(user=valid_user) response =", "is the same as the data in the expected models,", "= factory.create() not_duplicate = factory.create() if identifying_fields is None: identifying_fields", "and end dates of a ValidityPeriodForm from the given date", "= False UTC = timezone.utc requires_commodities = pytest.mark.skipif( not COMMODITIES_IMPLEMENTED,", "TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-14), ) @classmethod def", "fn(*args, **kwargs) except pytest.fail.Exception: pass except Exception: raise else: pytest.fail(f\"Rule", "relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)),", "validate(value) except ValidationError: if expected_valid: pytest.fail(f'Unexpected validation error for value", "f) for f in 
identifying_fields ) return not_duplicate def get_checkable_data(model:", "relativedelta(years=-2), ), \"adjacent_later_big\": ( relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2), ), \"overlap_normal\":", "Any from typing import Dict from typing import Sequence import", "System fields that will change from model to model are", "= pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not implemented\", ) requires_partial_temporary_stop", "( relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1, months=+2), ), \"big\": (relativedelta(years=-2), relativedelta(years=+2,", "fn(*args, **kwargs) # test should fail before cutoff with freeze_time(cutoff", "for name, date in ((\"start_date\", start), (\"end_date\", end)) for i,", "have sub-fields and hence result in multiple HTML <input> objects.\"\"\"", "\"described_footnote\": { # \"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\": \"123\", # },", "factory.create() not_duplicate = factory.create() if identifying_fields is None: identifying_fields =", "\"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14,", "pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\", ) requires_export_refund_nomenclature = pytest.mark.skipif(", "minute=0, second=0, microsecond=0) def __getattr__(self, name): if name in self.deltas:", "for any linked models rather than internal PKs. 
For example:", "\"\"\"Creates two records using the passed factory that are duplicates", "), \"adjacent_later_big\": ( relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2), ), \"overlap_normal\": (", "import freeze_time from lxml import etree from common.models.records import TrackedModel", "identifying_fields ) return not_duplicate def get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\" Returns", "approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"}, ) assert response.status_code == 200 content", "Dict from typing import Sequence import pytest from dateutil.parser import", "TaricDateRange( dt + relativedelta(days=+14), dt + relativedelta(months=+1), ) @classmethod def", "asserts that a test fails after a specified cutoff date.", "dt + relativedelta(days=-1), ) @classmethod def short_after(cls, dt): return TaricDateRange(", "medium_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-1),", "start = self.now + start if end is not None:", "# test should fail before cutoff with freeze_time(cutoff + relativedelta(days=-1)):", "None} ) elif value is not None: data.setdefault(field, value) return", "fields that will change from model to model are not", "assert not taric_schema.error_log, f\"XML errors: {taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml) kwargs", "if end is not None: end = self.now + end", "= pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop not implemented\", )", "): def decorator(func): def wraps( api_client, taric_schema, approved_transaction, valid_user, *args,", ") def only_applicable_after(cutoff): \"\"\" Decorator which asserts that a test", "a specified cutoff date. 
:param cutoff: A date string, or", "identifying_fields = list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing, field) for field in", "= form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v for i, v in enumerate(value)", "data = { name: getattr(model, get_accessor(model._meta.get_field(name))) for name in checked_field_names", "the model's data ignoring any automatically set fields and fields", "and that the count of both is equal. System fields", "\"\"\" return { f\"{name}_{i}\": part for name, date in ((\"start_date\",", "(record_code, subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name() = 'record']\") codes = etree.XPath(", "UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED = True MEURSING_TABLES_IMPLEMENTED", "of both is equal. System fields that will change from", "relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1), ), \"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14), relativedelta(days=+14),", "passed to `ignore`. The returned data will contain the identifying", "(relativedelta(months=+1, days=+1), None), \"adjacent_even_later\": ( relativedelta(months=+2, days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\":", "date. 
:param cutoff: A date string, or datetime object before", "counter_generator(), \"counter_generator\": counter_generator, }, ) return BytesIO(xml.encode()) def taric_xml_record_codes(xml): \"\"\"Yields", "of (record_code, subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name() = 'record']\") codes =", "Exception: raise else: if not expected_valid: pytest.fail(f'Expected validation error for", "*args, **kwargs, ) return wraps return decorator class Dates: deltas", "days=+1), relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1), None), \"adjacent_even_later\": ( relativedelta(months=+2, days=+1),", "common.serializers import validate_taric_xml_record_order from common.util import TaricDateRange from common.util import", "} identifying_fields = { name: data[name].get_identifying_fields() for name in checked_field_names", "in expected] imported_data = [get_checkable_data(i, ignore=ignore) for i in imported]", "with _0, _1, # etc. This mirrors the MultiValueWidget in", "lxml import etree from common.models.records import TrackedModel from common.renderers import", "not duplicates of each other and returns the record created", "# \"footnote_id\": \"123\", # }, # } \"\"\" checked_field_names =", "a factory or an object instance need to be provided\",", "itertools import count from typing import Any from typing import", "Asserts that every value for every field in the imported", "their current values, taking account of any fields that have", "in the expected model. 
System fields that will change from", "+ relativedelta(months=-1), dt + relativedelta(months=+1), ) @classmethod def no_end_before(cls, dt):", "+ relativedelta(days=-1), ) @classmethod def short_after(cls, dt): return TaricDateRange( dt", "hence result in multiple HTML <input> objects.\"\"\" data = {**form.initial}", "from the given date objects, eg: >>> validity_period_post_data( >>> datetime.date(2021,", "}, # } \"\"\" checked_field_names = {f.name for f in", "enumerate(value) if v is not None} ) elif value is", "validation error for value \"{value}\"') def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two", "string, or datetime object before which the test should fail.", "} \"\"\" return { f\"{name}_{i}\": part for name, date in", "relativedelta(months=-1, days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1), ),", "expected model. System fields that will change from model to", "model is the same as the data in the expected", "= xml.xpath(\".//*[local-name() = 'record']\") codes = etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\",", "api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"}, ) assert", "2, \"start_date_2\": 2021, \"end_date_0\": 4, \"end_date_1\": 3, \"end_date_2\": 2022, }", "that every value for every field in the imported model", "objects.\"\"\" data = {**form.initial} for field in form.rendered_fields: value =", "), {\"format\": \"xml\"}, ) assert response.status_code == 200 content =", "func( *args, **kwargs, ) return wraps return decorator class Dates:", "not taric_schema.error_log, f\"XML errors: {taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml) kwargs =", "\"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1), 
relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1),", "TrackedModel, ignore=frozenset()): \"\"\" Returns a dict representing the model's data", "to be provided - not both.\", ) current_instance = instance", "\"start_date_0\": 1, \"start_date_1\": 2, \"start_date_2\": 2021, \"end_date_0\": 4, \"end_date_1\": 3,", "other and returns the record created last.\"\"\" existing = factory.create()", "return [tuple(codes(record)) for record in records] def validate_taric_xml( factory=None, instance=None,", "cutoff: A date string, or datetime object before which the", "django import forms from django.core.exceptions import ValidationError from django.template.loader import", "next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\": counter_generator, }, ) return BytesIO(xml.encode()) def", "in enumerate([date.day, date.month, date.year]) } def get_form_data(form: forms.ModelForm) -> Dict[str,", "is equal. System fields that will change from model to", "def decorator(fn): @wraps(fn) def do_test(*args, **kwargs): # test should pass", "has some internal structure. 
So we need to generate one", "def no_end_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), None, )", "1, 2), >>> datetime.date(2022, 3, 4), >>> ) { \"start_date_0\":", "returned data will contain the identifying fields for any linked", "return wraps return decorator class Dates: deltas = { \"normal\":", "\"\"\" expected_data = get_checkable_data(expected, ignore=ignore) imported_data = get_checkable_data(imported, ignore=ignore) assert", "field in the imported model is the same as the", "import date from datetime import datetime from datetime import timezone", "expected_data == imported_data _transaction_counter = count(start=1) def generate_test_import_xml(obj: dict) ->", "For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # { # \"description\": \"My sample", "} \"\"\" checked_field_names = {f.name for f in model.copyable_fields} -", "ignore data = { name: getattr(model, get_accessor(model._meta.get_field(name))) for name in", "def short_overlap(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt +", "test should pass normally fn(*args, **kwargs) # test should fail", "kwargs = {\"xml\": xml, **kwargs} func( *args, **kwargs, ) return", "False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC = timezone.utc requires_commodities = pytest.mark.skipif(", "\"description\": \"My sample footnote text\", # \"described_footnote\": { # \"footnote_type__footnote_type_id\":", "the MultiValueWidget in django/forms/widgets.py. 
if field in data: del data[field]", "taric_schema, approved_transaction, valid_user, *args, **kwargs, ): if not factory and", "\"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None), \"normal_first_half\":", "Construct a POST data fragment for the validity period start", "pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature not implemented\", ) requires_meursing_tables", "structure. So we need to generate one # form item", "Returns a dict representing the model's data ignoring any automatically", "get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False", "instance=None, factory_kwargs=None, check_order=True, ): def decorator(func): def wraps( api_client, taric_schema,", "reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\": \"xml\"}, ) assert response.status_code", "decorator class Dates: deltas = { \"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\":", "returns the record created last.\"\"\" existing = factory.create() # allow", "typing import Any from typing import Dict from typing import", "self.datetime_now.date() @property def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0) def", "in identifying_fields) ) def make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using", "}, ) return BytesIO(xml.encode()) def taric_xml_record_codes(xml): \"\"\"Yields tuples of (record_code,", "raise AttributeError(name) @classmethod def short_before(cls, dt): return TaricDateRange( dt +", "test fails after a specified cutoff date. 
:param cutoff: A", "= list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing, f) != get_field_tuple(not_duplicate, f) for", "POST data fragment for the validity period start and end", "self.now + start if end is not None: end =", "render_to_string from django.urls import reverse from freezegun import freeze_time from", "rather than internal PKs. For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # {", "from dateutil.relativedelta import relativedelta from django import forms from django.core.exceptions", "if v is not None} ) elif value is not", "in checked_field_names } identifying_fields = { name: data[name].get_identifying_fields() for name", "a page and their current values, taking account of any", "set fields and fields with names passed to `ignore`. The", "relativedelta(months=+1), ) @classmethod def short_overlap(cls, dt): return TaricDateRange( dt +", "pytest.fail(f'Unexpected validation error for value \"{value}\"') except Exception: raise else:", "relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1), None), \"adjacent_even_later\": ( relativedelta(months=+2, days=+1), relativedelta(months=+3),", "and returns the record created last.\"\"\" existing = factory.create() #", "to be provided\", ) if factory and instance: raise AssertionError(", "if hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields) return data def assert_records_match( expected:", "using the passed factory that are duplicates of each other", "also not be checked. 
\"\"\" expected_data = [get_checkable_data(e, ignore=ignore) for", "xml.xpath(\".//*[local-name() = 'record']\") codes = etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", )", "Any]: \"\"\"Returns a dictionary of the fields that the form", "the given date objects, eg: >>> validity_period_post_data( >>> datetime.date(2021, 1,", "pytest from dateutil.parser import parse as parse_date from dateutil.relativedelta import", "False COMMODITIES_IMPLEMENTED = True MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False", "Dates: deltas = { \"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1,", "\"later\": ( relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1, months=+2), ), \"big\": (relativedelta(years=-2),", ">>> validity_period_post_data( >>> datetime.date(2021, 1, 2), >>> datetime.date(2022, 3, 4),", "- not both.\", ) current_instance = instance or factory.create( transaction=approved_transaction,", "in self.deltas: start, end = self.deltas[name] start = self.now +", "eg: >>> validity_period_post_data( >>> datetime.date(2021, 1, 2), >>> datetime.date(2022, 3,", "for e in expected] imported_data = [get_checkable_data(i, ignore=ignore) for i", "api_client.force_login(user=valid_user) response = api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk}, ), {\"format\":", "identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing, field)", "one # form item per decompressed value and append the", "== imported_data def assert_many_records_match( expected: Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(), ):", "pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\") return True return do_test return decorator", "then it is not just a simple # value and", "from 
common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED = True", "_transaction_counter = count(start=1) def generate_test_import_xml(obj: dict) -> BytesIO: xml =", "timezone from functools import wraps from io import BytesIO from", "from dateutil.parser import parse as parse_date from dateutil.relativedelta import relativedelta", "\"{value}\"') except Exception: raise else: if not expected_valid: pytest.fail(f'Expected validation", "data.update(identifying_fields) return data def assert_records_match( expected: TrackedModel, imported: TrackedModel, ignore=frozenset(),", "= { name: getattr(model, get_accessor(model._meta.get_field(name))) for name in checked_field_names }", "= 'record']\") codes = etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", ) return", "common.renderers import counter_generator from common.serializers import validate_taric_xml_record_order from common.util import", "{} ) api_client.force_login(user=valid_user) response = api_client.get( reverse( \"workbaskets:workbasket-detail\", kwargs={\"pk\": approved_transaction.workbasket.pk},", "etree from common.models.records import TrackedModel from common.renderers import counter_generator from", "an object instance need to be provided\", ) if factory", "are not duplicates of each other and returns the record", "relativedelta(days=+14, months=+1), ), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\": ( relativedelta(years=+3,", "import parse as parse_date from dateutil.relativedelta import relativedelta from django", "object instance need to be provided\", ) if factory and", "same as the data in the expected models, and that", "get_accessor from common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED =", "sample footnote text\", # \"described_footnote\": { # \"footnote_type__footnote_type_id\": \"FN\" #", "def 
make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the passed factory", "imported_data def assert_many_records_match( expected: Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(), ): \"\"\"", "except pytest.fail.Exception: pass except Exception: raise else: pytest.fail(f\"Rule applied before", "the fields that the form will put onto a page", ") return not_duplicate def get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\" Returns a", "[obj], \"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\": counter_generator, }, ) return", "\"footnote_id\": \"123\", # }, # } \"\"\" checked_field_names = {f.name", "return TaricDateRange( dt + relativedelta(days=+14), dt + relativedelta(months=+1), ) @classmethod", "ValidationError: if expected_valid: pytest.fail(f'Unexpected validation error for value \"{value}\"') except", "for the validity period start and end dates of a", "return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-1), ) @classmethod", "reason=\"Interdependent imports not implemented\", ) requires_update_importer = pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED,", "every field in the imported models is the same as", "be provided\", ) if factory and instance: raise AssertionError( \"Either", "reason=\"Requires Updating importers to be implemented\", ) @contextlib.contextmanager def raises_if(exception,", "validate_taric_xml_record_order(xml) kwargs = {\"xml\": xml, **kwargs} func( *args, **kwargs, )", "after a specified cutoff date. 
:param cutoff: A date string,", "end is not None: end = self.now + end return", "decompressed value and append the name with _0, _1, #", "(relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1), relativedelta(months=+2)), \"adjacent_no_end\": (relativedelta(months=+1, days=+1), None),", "page and their current values, taking account of any fields", "not implemented\", ) requires_update_importer = pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating", "value, expected_valid): try: validate(value) except ValidationError: if expected_valid: pytest.fail(f'Unexpected validation", ") elif value is not None: data.setdefault(field, value) return data", "the expected models, and that the count of both is", ") current_instance = instance or factory.create( transaction=approved_transaction, **factory_kwargs or {}", "form item per decompressed value and append the name with", "not checked. Any field names given to `ignore` will also", "dt + relativedelta(months=+1), ) @classmethod def short_overlap(cls, dt): return TaricDateRange(", "end return TaricDateRange(start, end) raise AttributeError(name) @classmethod def short_before(cls, dt):", "instance or factory.create( transaction=approved_transaction, **factory_kwargs or {} ) api_client.force_login(user=valid_user) response", "imported models is the same as the data in the", "\"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1, days=+1), relativedelta(months=+2)),", "months=+1), relativedelta(years=+3, months=+2), ), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)),", "imported: Sequence[TrackedModel], ignore=frozenset(), ): \"\"\" Asserts that every value for", "int]: \"\"\" Construct a POST data fragment for the validity", 
"ignore=frozenset()): \"\"\" Returns a dict representing the model's data ignoring", "= factory.create() if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) assert", "name: getattr(model, get_accessor(model._meta.get_field(name))) for name in checked_field_names } identifying_fields =", ":param cutoff: A date string, or datetime object before which", "= self.now + end return TaricDateRange(start, end) raise AttributeError(name) @classmethod", "@wraps(fn) def do_test(*args, **kwargs): # test should pass normally fn(*args,", "dateutil.relativedelta import relativedelta from django import forms from django.core.exceptions import", "imported model is the same as the data in the", "def validity_period_post_data(start: date, end: date) -> Dict[str, int]: \"\"\" Construct", "not factory and not instance: raise AssertionError( \"Either a factory", "**kwargs) # test should fail before cutoff with freeze_time(cutoff +", "io import BytesIO from itertools import count from typing import", "same as the data in the expected model. 
System fields", ") @classmethod def no_end_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1),", "of a ValidityPeriodForm from the given date objects, eg: >>>", "relativedelta(days=-14), ) @classmethod def medium_before(cls, dt): return TaricDateRange( dt +", "for name in checked_field_names if hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields) return", "= etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", ) return [tuple(codes(record)) for record", "factory or an object instance need to be provided -", "if hasattr(form.fields[field].widget, \"decompress\"): # If the widget can be decompressed,", "factory and not instance: raise AssertionError( \"Either a factory or", "{\"xml\": xml, **kwargs} func( *args, **kwargs, ) return wraps return", "def only_applicable_after(cutoff): \"\"\" Decorator which asserts that a test fails", "{ # \"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\": \"123\", # }, #", "pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating importers to be implemented\", )", "get_accessor(model._meta.get_field(name))) for name in checked_field_names } identifying_fields = { name:", "{ \"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\": (", "representing the model's data ignoring any automatically set fields and", "get_field_tuple(not_duplicate, f) for f in identifying_fields ) return not_duplicate def", "any( get_field_tuple(existing, f) != get_field_tuple(not_duplicate, f) for f in identifying_fields", "2), >>> datetime.date(2022, 3, 4), >>> ) { \"start_date_0\": 1,", "Dict[str, Any]: \"\"\"Returns a dictionary of the fields that the", "form.rendered_fields: value = data[field] if field in data else form.fields[field].initial", "get_field_tuple(existing, f) != get_field_tuple(not_duplicate, f) for f in identifying_fields )", "expected: 
Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(), ): \"\"\" Asserts that every", "raises_if(exception, expected): try: yield except exception: if not expected: raise", "), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)),", "{ f\"{name}_{i}\": part for name, date in ((\"start_date\", start), (\"end_date\",", "not implemented\", ) requires_interdependent_import = pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports", "the expected model. System fields that will change from model", "\"\"\" checked_field_names = {f.name for f in model.copyable_fields} - ignore", "should pass normally fn(*args, **kwargs) # test should fail before", "raise else: pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\") return True return do_test", "\"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\": ( relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2),", "{ \"start_date_0\": 1, \"start_date_1\": 2, \"start_date_2\": 2021, \"end_date_0\": 4, \"end_date_1\":", "do_test return decorator def validity_period_post_data(start: date, end: date) -> Dict[str,", "that have sub-fields and hence result in multiple HTML <input>", "# \"footnote_type__footnote_type_id\": \"FN\" # \"footnote_id\": \"123\", # }, # }", "assert_many_records_match( expected: Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(), ): \"\"\" Asserts that", "response.content xml = etree.XML(content) taric_schema.validate(xml) assert not taric_schema.error_log, f\"XML errors:", "not be checked. 
\"\"\" expected_data = get_checkable_data(expected, ignore=ignore) imported_data =", ") @classmethod def short_overlap(cls, dt): return TaricDateRange( dt + relativedelta(months=-1),", "dateutil.parser import parse as parse_date from dateutil.relativedelta import relativedelta from", "two records using the passed factory that are not duplicates", "in checked_field_names if hasattr(data[name], \"get_identifying_fields\") } data.update(identifying_fields) return data def", "import render_to_string from django.urls import reverse from freezegun import freeze_time", "\"counter_generator\": counter_generator, }, ) return BytesIO(xml.encode()) def taric_xml_record_codes(xml): \"\"\"Yields tuples", "def do_test(*args, **kwargs): # test should pass normally fn(*args, **kwargs)", "del data[field] value = form.fields[field].widget.decompress(value) data.update( **{f\"{field}_{i}\": v for i,", "widget can be decompressed, then it is not just a", "and instance: raise AssertionError( \"Either a factory or an object", "importers to be implemented\", ) @contextlib.contextmanager def raises_if(exception, expected): try:", "Sequence import pytest from dateutil.parser import parse as parse_date from", "before cutoff with freeze_time(cutoff + relativedelta(days=-1)): try: fn(*args, **kwargs) except", "need to generate one # form item per decompressed value", "records] def validate_taric_xml( factory=None, instance=None, factory_kwargs=None, check_order=True, ): def decorator(func):", "\"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\": counter_generator, }, ) return BytesIO(xml.encode())", "expected_valid: pytest.fail(f'Expected validation error for value \"{value}\"') def make_duplicate_record(factory, identifying_fields=None):", "reason=\"Partial temporary stop not implemented\", ) requires_interdependent_import = pytest.mark.skipif( not", "to be implemented\", ) @contextlib.contextmanager def raises_if(exception, 
expected): try: yield", "exception: if not expected: raise else: if expected: pytest.fail(f\"Did not", "if factory and instance: raise AssertionError( \"Either a factory or", "\"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(), \"counter_generator\": counter_generator,", "the test should fail. \"\"\" cutoff = parse_date(cutoff) def decorator(fn):", "from datetime import date from datetime import datetime from datetime", "self.deltas[name] start = self.now + start if end is not", "v is not None} ) elif value is not None:", "TrackedModel, ignore=frozenset(), ): \"\"\" Asserts that every value for every", "that the count of both is equal. System fields that", "= count(start=1) def generate_test_import_xml(obj: dict) -> BytesIO: xml = render_to_string(", "data will contain the identifying fields for any linked models", "[get_checkable_data(e, ignore=ignore) for e in expected] imported_data = [get_checkable_data(i, ignore=ignore)", "dt + relativedelta(days=+14), dt + relativedelta(months=+1), ) @classmethod def short_overlap(cls,", "subrecord_code)\"\"\" records = xml.xpath(\".//*[local-name() = 'record']\") codes = etree.XPath( \".//*[local-name()='record.code'", "dict) -> BytesIO: xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter),", "assert any( get_field_tuple(existing, f) != get_field_tuple(not_duplicate, f) for f in", "return TaricDateRange(start, end) raise AttributeError(name) @classmethod def short_before(cls, dt): return", "requires_partial_temporary_stop = pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop not implemented\",", "= [get_checkable_data(e, ignore=ignore) for e in expected] imported_data = [get_checkable_data(i,", "expected: pytest.fail(f\"Did not raise {exception}\") def check_validator(validate, value, 
expected_valid): try:", "in ((\"start_date\", start), (\"end_date\", end)) for i, part in enumerate([date.day,", "pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not implemented\", ) requires_update_importer =", "<input> objects.\"\"\" data = {**form.initial} for field in form.rendered_fields: value", "try: validate(value) except ValidationError: if expected_valid: pytest.fail(f'Unexpected validation error for", "\"end_date_1\": 3, \"end_date_2\": 2022, } \"\"\" return { f\"{name}_{i}\": part", "except Exception: raise else: if not expected_valid: pytest.fail(f'Expected validation error", "@property def now(self): return self.datetime_now.date() @property def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0,", "mirrors the MultiValueWidget in django/forms/widgets.py. if field in data: del", "form will put onto a page and their current values,", "TrackedModel from common.renderers import counter_generator from common.serializers import validate_taric_xml_record_order from", "raise {exception}\") def check_validator(validate, value, expected_valid): try: validate(value) except ValidationError:", "None), \"adjacent_even_later\": ( relativedelta(months=+2, days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\": ( relativedelta(years=-2,", "decorator(fn): @wraps(fn) def do_test(*args, **kwargs): # test should pass normally", "The returned data will contain the identifying fields for any", "assert response.status_code == 200 content = response.content xml = etree.XML(content)", "reason=\"Meursing tables not implemented\", ) requires_partial_temporary_stop = pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED,", "f) != get_field_tuple(not_duplicate, f) for f in identifying_fields ) return", "make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the passed factory that", "**factory_kwargs or {} ) api_client.force_login(user=valid_user) response = api_client.get( 
reverse( \"workbaskets:workbasket-detail\",", "{ # \"description\": \"My sample footnote text\", # \"described_footnote\": {", "result in multiple HTML <input> objects.\"\"\" data = {**form.initial} for", "identifying_fields if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) return factory.create(", "if identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing,", "import Sequence import pytest from dateutil.parser import parse as parse_date", "or datetime object before which the test should fail. \"\"\"", "if field in data else form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"): #", "TaricDateRange from common.util import get_accessor from common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED", "(relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\": ( relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2), ),", "relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)),", "datetime.date(2022, 3, 4), >>> ) { \"start_date_0\": 1, \"start_date_1\": 2,", "name: data[name].get_identifying_fields() for name in checked_field_names if hasattr(data[name], \"get_identifying_fields\") }", "fail before cutoff with freeze_time(cutoff + relativedelta(days=-1)): try: fn(*args, **kwargs)", "= pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature not implemented\", )", "applied before {cutoff:%Y-%m-%d}\") return True return do_test return decorator def", "the record created last.\"\"\" existing = factory.create() # allow overriding", "provided\", ) if factory and instance: raise AssertionError( \"Either a", "{f.name for f in model.copyable_fields} - ignore data = {", 
"import counter_generator from common.serializers import validate_taric_xml_record_order from common.util import TaricDateRange", "days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2), relativedelta(years=-2), ), \"adjacent_later_big\":", "model's data ignoring any automatically set fields and fields with", "**{f\"{field}_{i}\": v for i, v in enumerate(value) if v is", "of each other and returns the record created last.\"\"\" existing", "example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # { # \"description\": \"My sample footnote", "deltas = { \"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)),", "will also not be checked. \"\"\" expected_data = get_checkable_data(expected, ignore=ignore)", "= self.now + start if end is not None: end", "records = xml.xpath(\".//*[local-name() = 'record']\") codes = etree.XPath( \".//*[local-name()='record.code' or", "relativedelta(months=+2, days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2), relativedelta(years=-2), ),", ") requires_interdependent_import = pytest.mark.skipif( not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not implemented\",", "end) raise AttributeError(name) @classmethod def short_before(cls, dt): return TaricDateRange( dt", "(\"end_date\", end)) for i, part in enumerate([date.day, date.month, date.year]) }", "date.month, date.year]) } def get_form_data(form: forms.ModelForm) -> Dict[str, Any]: \"\"\"Returns", "counter_generator, }, ) return BytesIO(xml.encode()) def taric_xml_record_codes(xml): \"\"\"Yields tuples of", "# test should pass normally fn(*args, **kwargs) # test should", "factory_kwargs=None, check_order=True, ): def decorator(func): def wraps( api_client, taric_schema, approved_transaction,", "that are duplicates of each other and returns the record", "for i in imported] 
assert expected_data == imported_data _transaction_counter =", "import datetime from datetime import timezone from functools import wraps", "raise AssertionError( \"Either a factory or an object instance need", "\"{value}\"') def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the passed", "def medium_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt +", "period start and end dates of a ValidityPeriodForm from the", "checked. \"\"\" expected_data = get_checkable_data(expected, ignore=ignore) imported_data = get_checkable_data(imported, ignore=ignore)", "parse_date(cutoff) def decorator(fn): @wraps(fn) def do_test(*args, **kwargs): # test should", "Dict[str, int]: \"\"\" Construct a POST data fragment for the", "identifying_fields) ) def make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the", "from typing import Dict from typing import Sequence import pytest", "f in model.copyable_fields} - ignore data = { name: getattr(model,", "identifying_fields is None: identifying_fields = list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing, f)", "should fail. \"\"\" cutoff = parse_date(cutoff) def decorator(fn): @wraps(fn) def", "= pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\", ) requires_export_refund_nomenclature =", "django.urls import reverse from freezegun import freeze_time from lxml import", "from common.models.records import TrackedModel from common.renderers import counter_generator from common.serializers", "model. 
System fields that will change from model to model", "\"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\": ( relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1,", "name in self.deltas: start, end = self.deltas[name] start = self.now", "+ relativedelta(days=-14), ) @classmethod def medium_before(cls, dt): return TaricDateRange( dt", "relativedelta(days=+14), dt + relativedelta(months=+1), ) @classmethod def short_overlap(cls, dt): return", "(relativedelta(), None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)), } @property def now(self): return", "validity_period_post_data( >>> datetime.date(2021, 1, 2), >>> datetime.date(2022, 3, 4), >>>", "internal structure. So we need to generate one # form", "nomenclature not implemented\", ) requires_meursing_tables = pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing", "relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)), \"adjacent_earlier\": (relativedelta(months=-1), relativedelta(days=-1)), \"adjacent_later\": (relativedelta(months=+1,", "fields that have sub-fields and hence result in multiple HTML", "self.deltas: start, end = self.deltas[name] start = self.now + start", "\"end_date_2\": 2022, } \"\"\" return { f\"{name}_{i}\": part for name,", "def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using the passed factory", "} @property def now(self): return self.datetime_now.date() @property def datetime_now(self): return", "(relativedelta(weeks=-4), relativedelta(weeks=+4)), \"future\": (relativedelta(weeks=+10), relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None), \"normal_first_half\": (relativedelta(),", "identifying_fields = { name: data[name].get_identifying_fields() for name in checked_field_names if", "do_test(*args, **kwargs): # test should pass normally fn(*args, **kwargs) #", "model to model are not checked. 
Any field names given", "(relativedelta(), relativedelta(days=+14)), } @property def now(self): return self.datetime_now.date() @property def", "and not instance: raise AssertionError( \"Either a factory or an", "def short_before(cls, dt): return TaricDateRange( dt + relativedelta(months=-1), dt +", "UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating importers to be implemented\", ) @contextlib.contextmanager def", "name in checked_field_names } identifying_fields = { name: data[name].get_identifying_fields() for", "instance need to be provided\", ) if factory and instance:", "datetime import date from datetime import datetime from datetime import", "4, \"end_date_1\": 3, \"end_date_2\": 2022, } \"\"\" return { f\"{name}_{i}\":", "f\"{name}_{i}\": part for name, date in ((\"start_date\", start), (\"end_date\", end))", "else form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"): # If the widget can", "for value \"{value}\"') def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records using", "every value for every field in the imported model is", "expected_valid: pytest.fail(f'Unexpected validation error for value \"{value}\"') except Exception: raise", "This mirrors the MultiValueWidget in django/forms/widgets.py. if field in data:", "api_client, taric_schema, approved_transaction, valid_user, *args, **kwargs, ): if not factory", "not raise {exception}\") def check_validator(validate, value, expected_valid): try: validate(value) except", "expected models, and that the count of both is equal.", "def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0) def __getattr__(self, name):", "return self.datetime_now.date() @property def datetime_now(self): return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0)", "cutoff date. 
:param cutoff: A date string, or datetime object", "def generate_test_import_xml(obj: dict) -> BytesIO: xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={", "data ignoring any automatically set fields and fields with names", "+ relativedelta(days=-1)): try: fn(*args, **kwargs) except pytest.fail.Exception: pass except Exception:", "contain the identifying fields for any linked models rather than", "taric_schema.error_log, f\"XML errors: {taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml) kwargs = {\"xml\":", "def short_after(cls, dt): return TaricDateRange( dt + relativedelta(days=+14), dt +", "dt + relativedelta(months=+1), ) @classmethod def no_end_before(cls, dt): return TaricDateRange(", "and has some internal structure. So we need to generate", "implemented\", ) requires_partial_temporary_stop = pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial temporary stop", "for f in identifying_fields ) return not_duplicate def get_checkable_data(model: TrackedModel,", "return TaricDateRange( dt + relativedelta(months=-1), None, ) def only_applicable_after(cutoff): \"\"\"", "field in identifying_fields) ) def make_non_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records", "for i, v in enumerate(value) if v is not None}", "raise else: if expected: pytest.fail(f\"Did not raise {exception}\") def check_validator(validate,", "else: if not expected_valid: pytest.fail(f'Expected validation error for value \"{value}\"')", "\"start_date_1\": 2, \"start_date_2\": 2021, \"end_date_0\": 4, \"end_date_1\": 3, \"end_date_2\": 2022,", "reason=\"Export refund nomenclature not implemented\", ) requires_meursing_tables = pytest.mark.skipif( not", "= timezone.utc requires_commodities = pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\",", "# { # \"description\": \"My sample footnote text\", # \"described_footnote\":", 
"Sequence[TrackedModel], ignore=frozenset(), ): \"\"\" Asserts that every value for every", "etree.XML(content) taric_schema.validate(xml) assert not taric_schema.error_log, f\"XML errors: {taric_schema.error_log}\" if check_order:", "months=+1), ), \"overlap_big\": (relativedelta(years=+1), relativedelta(years=+3, days=+2)), \"after_big\": ( relativedelta(years=+3, months=+1),", "before which the test should fail. \"\"\" cutoff = parse_date(cutoff)", "to `ignore` will also not be checked. \"\"\" expected_data =", "fragment for the validity period start and end dates of", "\"end_date_0\": 4, \"end_date_1\": 3, \"end_date_2\": 2022, } \"\"\" return {", "fields for any linked models rather than internal PKs. For", "model.copyable_fields} - ignore data = { name: getattr(model, get_accessor(model._meta.get_field(name))) for", "transaction=approved_transaction, **factory_kwargs or {} ) api_client.force_login(user=valid_user) response = api_client.get( reverse(", "None, ) def only_applicable_after(cutoff): \"\"\" Decorator which asserts that a", "forms from django.core.exceptions import ValidationError from django.template.loader import render_to_string from", "be checked. \"\"\" expected_data = get_checkable_data(expected, ignore=ignore) imported_data = get_checkable_data(imported,", "v in enumerate(value) if v is not None} ) elif", "\"message_counter\": counter_generator(), \"counter_generator\": counter_generator, }, ) return BytesIO(xml.encode()) def taric_xml_record_codes(xml):", "data def assert_records_match( expected: TrackedModel, imported: TrackedModel, ignore=frozenset(), ): \"\"\"", "`ignore`. 
The returned data will contain the identifying fields for", "TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(months=+1), ) @classmethod def", "# } \"\"\" checked_field_names = {f.name for f in model.copyable_fields}", "\"decompress\"): # If the widget can be decompressed, then it", "expected_valid): try: validate(value) except ValidationError: if expected_valid: pytest.fail(f'Unexpected validation error", "expected_data = [get_checkable_data(e, ignore=ignore) for e in expected] imported_data =", "years=+1), ), \"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\": (", "which the test should fail. \"\"\" cutoff = parse_date(cutoff) def", "from lxml import etree from common.models.records import TrackedModel from common.renderers", "not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature not implemented\", ) requires_meursing_tables =", "import get_accessor from common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED", "), \"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\": ( relativedelta(days=+15),", "not implemented\", ) requires_meursing_tables = pytest.mark.skipif( not MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables", "is not None: end = self.now + end return TaricDateRange(start,", "fails after a specified cutoff date. 
:param cutoff: A date", "will contain the identifying fields for any linked models rather", "relativedelta(years=+1, months=+2), ), \"big\": (relativedelta(years=-2), relativedelta(years=+2, days=+1)), \"adjacent\": (relativedelta(days=+1), relativedelta(months=+1)),", "except exception: if not expected: raise else: if expected: pytest.fail(f\"Did", "= { \"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1), relativedelta(years=-1, months=+1)), \"later\":", "return datetime.now(tz=UTC).replace(hour=0, minute=0, second=0, microsecond=0) def __getattr__(self, name): if name", "name): if name in self.deltas: start, end = self.deltas[name] start", "} def get_form_data(form: forms.ModelForm) -> Dict[str, Any]: \"\"\"Returns a dictionary", "None: identifying_fields = list(factory._meta.model.identifying_fields) assert any( get_field_tuple(existing, f) != get_field_tuple(not_duplicate,", "\"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1), ), \"overlap_normal_earlier\": ( relativedelta(months=-1,", "and append the name with _0, _1, # etc. 
This", "not None} ) elif value is not None: data.setdefault(field, value)", "form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"): # If the widget can be", "ValidityPeriodForm from the given date objects, eg: >>> validity_period_post_data( >>>", "import TrackedModel from common.renderers import counter_generator from common.serializers import validate_taric_xml_record_order", "Updating importers to be implemented\", ) @contextlib.contextmanager def raises_if(exception, expected):", "in the imported models is the same as the data", "\"adjacent_no_end\": (relativedelta(months=+1, days=+1), None), \"adjacent_even_later\": ( relativedelta(months=+2, days=+1), relativedelta(months=+3), ),", "( relativedelta(months=-1, days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1),", "dt + relativedelta(months=-1), dt + relativedelta(days=-1), ) @classmethod def short_after(cls,", "fields and fields with names passed to `ignore`. 
The returned", "cutoff with freeze_time(cutoff + relativedelta(days=-1)): try: fn(*args, **kwargs) except pytest.fail.Exception:", "contextlib from datetime import date from datetime import datetime from", "checked_field_names } identifying_fields = { name: data[name].get_identifying_fields() for name in", "from datetime import datetime from datetime import timezone from functools", "assert expected_data == imported_data _transaction_counter = count(start=1) def generate_test_import_xml(obj: dict)", "ignore=ignore) for i in imported] assert expected_data == imported_data _transaction_counter", "from typing import Sequence import pytest from dateutil.parser import parse", "= etree.XML(content) taric_schema.validate(xml) assert not taric_schema.error_log, f\"XML errors: {taric_schema.error_log}\" if", "class Dates: deltas = { \"normal\": (relativedelta(), relativedelta(months=+1)), \"earlier\": (relativedelta(years=-1),", "+ relativedelta(months=-1), dt + relativedelta(days=-1), ) @classmethod def short_after(cls, dt):", "except ValidationError: if expected_valid: pytest.fail(f'Unexpected validation error for value \"{value}\"')", "end)) for i, part in enumerate([date.day, date.month, date.year]) } def", "xml, **kwargs} func( *args, **kwargs, ) return wraps return decorator", "\"after_big\": ( relativedelta(years=+3, months=+1), relativedelta(years=+3, months=+2), ), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)),", "not COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\", ) requires_export_refund_nomenclature = pytest.mark.skipif( not", "two records using the passed factory that are duplicates of", "a dict representing the model's data ignoring any automatically set", ") requires_update_importer = pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires Updating importers to", "local-name()='subrecord.code']/text()\", ) return [tuple(codes(record)) for record in records] def validate_taric_xml(", "model are not checked. 
Any field names given to `ignore`", "get_checkable_data(expected, ignore=ignore) imported_data = get_checkable_data(imported, ignore=ignore) assert expected_data == imported_data", "else: pytest.fail(f\"Rule applied before {cutoff:%Y-%m-%d}\") return True return do_test return", "\"normal_first_half\": (relativedelta(), relativedelta(days=+14)), } @property def now(self): return self.datetime_now.date() @property", "in imported] assert expected_data == imported_data _transaction_counter = count(start=1) def", "requires_export_refund_nomenclature = pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature not implemented\",", "render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter), \"message_counter\":", "is None: identifying_fields = list(factory._meta.model.identifying_fields) return factory.create( **dict(get_field_tuple(existing, field) for", "Sequence[TrackedModel], imported: Sequence[TrackedModel], ignore=frozenset(), ): \"\"\" Asserts that every value", "imports not implemented\", ) requires_update_importer = pytest.mark.skipif( not UPDATE_IMPORTER_IMPLEMENTED, reason=\"Requires", "`ignore` will also not be checked. 
\"\"\" expected_data = get_checkable_data(expected,", "codes = etree.XPath( \".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", ) return [tuple(codes(record)) for", "= False COMMODITIES_IMPLEMENTED = True MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED =", "wraps from io import BytesIO from itertools import count from", "PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC = timezone.utc requires_commodities = pytest.mark.skipif( not", "common.util import get_field_tuple INTERDEPENDENT_IMPORT_IMPLEMENTED = True UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED", "the record created last.\"\"\" existing = factory.create() not_duplicate = factory.create()", "identifying_fields=None): \"\"\"Creates two records using the passed factory that are", "== imported_data _transaction_counter = count(start=1) def generate_test_import_xml(obj: dict) -> BytesIO:", "months=+1, years=+1), ), \"overlap_normal_earlier\": ( relativedelta(months=-1, days=+14), relativedelta(days=+14), ), \"overlap_normal_same_year\":", "ValidationError from django.template.loader import render_to_string from django.urls import reverse from", "and returns the record created last.\"\"\" existing = factory.create() not_duplicate", "created last.\"\"\" existing = factory.create() not_duplicate = factory.create() if identifying_fields", "\"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\":", "a test fails after a specified cutoff date. 
:param cutoff:", "generate_test_import_xml(obj: dict) -> BytesIO: xml = render_to_string( template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\":", "existing = factory.create() not_duplicate = factory.create() if identifying_fields is None:", "{cutoff:%Y-%m-%d}\") return True return do_test return decorator def validity_period_post_data(start: date,", "= {**form.initial} for field in form.rendered_fields: value = data[field] if", "field in data else form.fields[field].initial if hasattr(form.fields[field].widget, \"decompress\"): # If", "not INTERDEPENDENT_IMPORT_IMPLEMENTED, reason=\"Interdependent imports not implemented\", ) requires_update_importer = pytest.mark.skipif(", "if name in self.deltas: start, end = self.deltas[name] start =", "import contextlib from datetime import date from datetime import datetime", "@classmethod def short_after(cls, dt): return TaricDateRange( dt + relativedelta(days=+14), dt", "(relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14), relativedelta(months=+1)), \"current\": (relativedelta(weeks=-4),", "record created last.\"\"\" existing = factory.create() # allow overriding identifying_fields", "UTC = timezone.utc requires_commodities = pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities not", "( relativedelta(months=+1, days=+1), relativedelta(years=+2, months=+2), ), \"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14,", "date objects, eg: >>> validity_period_post_data( >>> datetime.date(2021, 1, 2), >>>", "data in the expected model. System fields that will change", "both is equal. 
System fields that will change from model", "datetime.date(2021, 1, 2), >>> datetime.date(2022, 3, 4), >>> ) {", "MEURSING_TABLES_IMPLEMENTED, reason=\"Meursing tables not implemented\", ) requires_partial_temporary_stop = pytest.mark.skipif( not", "months=+2), ), \"backwards\": (relativedelta(months=+1), relativedelta(days=+1)), \"starts_with_normal\": (relativedelta(), relativedelta(days=+14)), \"ends_with_normal\": (relativedelta(days=+14),", "error for value \"{value}\"') except Exception: raise else: if not", "not be checked. \"\"\" expected_data = [get_checkable_data(e, ignore=ignore) for e", "be checked. \"\"\" expected_data = [get_checkable_data(e, ignore=ignore) for e in", "= factory.create() # allow overriding identifying_fields if identifying_fields is None:", "not None: end = self.now + end return TaricDateRange(start, end)", "object before which the test should fail. \"\"\" cutoff =", "i, v in enumerate(value) if v is not None} )", "not implemented\", ) requires_export_refund_nomenclature = pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund", "the passed factory that are duplicates of each other and", "import wraps from io import BytesIO from itertools import count", "relativedelta(weeks=+20)), \"no_end\": (relativedelta(), None), \"normal_first_half\": (relativedelta(), relativedelta(days=+14)), } @property def", "\"\"\" cutoff = parse_date(cutoff) def decorator(fn): @wraps(fn) def do_test(*args, **kwargs):", "or local-name()='subrecord.code']/text()\", ) return [tuple(codes(record)) for record in records] def", "i, part in enumerate([date.day, date.month, date.year]) } def get_form_data(form: forms.ModelForm)", "django.core.exceptions import ValidationError from django.template.loader import render_to_string from django.urls import", "in the expected models, and that the count of both", "True MEURSING_TABLES_IMPLEMENTED = False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC = timezone.utc", "imported] assert 
expected_data == imported_data _transaction_counter = count(start=1) def generate_test_import_xml(obj:", ") return wraps return decorator class Dates: deltas = {", "values, taking account of any fields that have sub-fields and", "requires_commodities = pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\", ) requires_export_refund_nomenclature", "not both.\", ) current_instance = instance or factory.create( transaction=approved_transaction, **factory_kwargs", "+ relativedelta(months=+1), ) @classmethod def short_overlap(cls, dt): return TaricDateRange( dt", "specified cutoff date. :param cutoff: A date string, or datetime", "months=+2), ), \"overlap_normal\": ( relativedelta(days=+15), relativedelta(days=+14, months=+1, years=+1), ), \"overlap_normal_earlier\":", "= True UPDATE_IMPORTER_IMPLEMENTED = True EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED = False COMMODITIES_IMPLEMENTED =", "( relativedelta(months=+2, days=+1), relativedelta(months=+3), ), \"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2), relativedelta(years=-2),", "return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(days=-14), ) @classmethod", ") @classmethod def short_after(cls, dt): return TaricDateRange( dt + relativedelta(days=+14),", "for field in form.rendered_fields: value = data[field] if field in", "EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature not implemented\", ) requires_meursing_tables = pytest.mark.skipif(", "not expected_valid: pytest.fail(f'Expected validation error for value \"{value}\"') def make_duplicate_record(factory,", "not_duplicate def get_checkable_data(model: TrackedModel, ignore=frozenset()): \"\"\" Returns a dict representing", "microsecond=0) def __getattr__(self, name): if name in self.deltas: start, end", ">>> ) { \"start_date_0\": 1, \"start_date_1\": 2, \"start_date_2\": 2021, \"end_date_0\":", "put onto a page and their current values, taking account", "tables not 
implemented\", ) requires_partial_temporary_stop = pytest.mark.skipif( not PARTIAL_TEMPORARY_STOP_IMPLEMENTED, reason=\"Partial", ") requires_export_refund_nomenclature = pytest.mark.skipif( not EXPORT_REFUND_NOMENCLATURE_IMPLEMENTED, reason=\"Export refund nomenclature not", "raise else: if not expected_valid: pytest.fail(f'Expected validation error for value", "error for value \"{value}\"') def make_duplicate_record(factory, identifying_fields=None): \"\"\"Creates two records", "3, 4), >>> ) { \"start_date_0\": 1, \"start_date_1\": 2, \"start_date_2\":", "+ end return TaricDateRange(start, end) raise AttributeError(name) @classmethod def short_before(cls,", "import Any from typing import Dict from typing import Sequence", "= False PARTIAL_TEMPORARY_STOP_IMPLEMENTED = False UTC = timezone.utc requires_commodities =", "test should fail before cutoff with freeze_time(cutoff + relativedelta(days=-1)): try:", "from django import forms from django.core.exceptions import ValidationError from django.template.loader", "AssertionError( \"Either a factory or an object instance need to", "\".//*[local-name()='record.code' or local-name()='subrecord.code']/text()\", ) return [tuple(codes(record)) for record in records]", "than internal PKs. For example: get_checkable_data(FootnoteDescriptionFactory(), ignore={\"sid\"}) # { #", "equal. System fields that will change from model to model", ") assert response.status_code == 200 content = response.content xml =", "errors: {taric_schema.error_log}\" if check_order: validate_taric_xml_record_order(xml) kwargs = {\"xml\": xml, **kwargs}", "which asserts that a test fails after a specified cutoff", "datetime object before which the test should fail. 
\"\"\" cutoff", "given date objects, eg: >>> validity_period_post_data( >>> datetime.date(2021, 1, 2),", "\"start_date_2\": 2021, \"end_date_0\": 4, \"end_date_1\": 3, \"end_date_2\": 2022, } \"\"\"", "def assert_records_match( expected: TrackedModel, imported: TrackedModel, ignore=frozenset(), ): \"\"\" Asserts", "from itertools import count from typing import Any from typing", "__getattr__(self, name): if name in self.deltas: start, end = self.deltas[name]", "for f in model.copyable_fields} - ignore data = { name:", "current_instance = instance or factory.create( transaction=approved_transaction, **factory_kwargs or {} )", "timezone.utc requires_commodities = pytest.mark.skipif( not COMMODITIES_IMPLEMENTED, reason=\"Commodities not implemented\", )", "return TaricDateRange( dt + relativedelta(months=-1), dt + relativedelta(months=+1), ) @classmethod", "if expected_valid: pytest.fail(f'Unexpected validation error for value \"{value}\"') except Exception:", "= parse_date(cutoff) def decorator(fn): @wraps(fn) def do_test(*args, **kwargs): # test", "relativedelta(months=+3), ), \"adjacent_earlier_big\": ( relativedelta(years=-2, months=-2), relativedelta(years=-2), ), \"adjacent_later_big\": (", "fields that the form will put onto a page and", "= instance or factory.create( transaction=approved_transaction, **factory_kwargs or {} ) api_client.force_login(user=valid_user)", "from model to model are not checked. Any field names", "template_name=\"workbaskets/taric/transaction_detail.xml\", context={ \"envelope_id\": next(_transaction_counter), \"tracked_models\": [obj], \"transaction_id\": next(_transaction_counter), \"message_counter\": counter_generator(),", "relativedelta(years=-1, months=+1)), \"later\": ( relativedelta(years=+1, months=+1, days=+1), relativedelta(years=+1, months=+2), )," ]
[ "# 发送HTML邮件 # msg = MIMEText('<html><body><h1>Hello</h1>' + # '<p>send by", "'plain', 'utf-8') # 发送HTML邮件 # msg = MIMEText('<html><body><h1>Hello</h1>' + #", "把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime) msg['From'] =", "'</body></html>', 'html', 'utf-8') # 发送带附件的邮件 # 邮件对象: msg = MIMEMultipart()", "mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime) msg['From'] = _format_addr('Python爱好者", "href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>', 'html', 'utf-8') # 发送带附件的邮件 # 邮件对象:", "'utf-8') # 发送HTML邮件 # msg = MIMEText('<html><body><h1>Hello</h1>' + # '<p>send", "open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f: # 设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image', 'png',", "to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() server = smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1)", "<%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send", "mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来: mime.set_payload(f.read())", "on 2016年8月10日 @author: Administrator ''' from email import encoders from", "with file...', 'plain', 'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as", "'<EMAIL>'#input('To: ') smtp_server = 'smtp.163.com'#input('SMTP server: ') # 发送纯文本邮件 #", "'<EMAIL>'#input('From: ') password = input('Password: ') to_addr = '<EMAIL>'#input('To: ')", "to_addr = '<EMAIL>'#input('To: ') smtp_server = 'smtp.163.com'#input('SMTP server: ') #", "<%s>' % from_addr) msg['To'] = _format_addr('管理员 <%s>' % to_addr) msg['Subject']", "= _format_addr('Python爱好者 <%s>' % from_addr) msg['To'] = _format_addr('管理员 <%s>' %", "by <a 
href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>', 'html', 'utf-8') # 发送带附件的邮件", "import parseaddr, formataddr import smtplib def _format_addr(s): name, addr =", "+ # '<p>send by <a href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>', 'html',", "% from_addr) msg['To'] = _format_addr('管理员 <%s>' % to_addr) msg['Subject'] =", "''' from email import encoders from email.header import Header from", "发送纯文本邮件 # msg = MIMEText('hello, send by Python...', 'plain', 'utf-8')", "'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send with file...', 'plain', 'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片:", "用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime) msg['From'] = _format_addr('Python爱好者 <%s>' %", "# 用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime) msg['From'] = _format_addr('Python爱好者 <%s>'", "+ # '</body></html>', 'html', 'utf-8') # 发送带附件的邮件 # 邮件对象: msg", "import MIMEMultipart from email.mime.multipart import MIMEBase from email.utils import parseaddr,", "return formataddr((Header(name, 'utf-8').encode(), addr)) from_addr = '<EMAIL>'#input('From: ') password =", "添加到MIMEMultipart: msg.attach(mime) msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) msg['To'] =", "'attachment', filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来: mime.set_payload(f.read()) #", "email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart from email.mime.multipart import", "email.header import Header from email.mime.text import MIMEText from email.mime.multipart import", "msg['To'] = _format_addr('管理员 <%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode()", "MIMEText('hello, send by Python...', 'plain', 'utf-8') # 发送HTML邮件 # msg", "_format_addr('管理员 <%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() server =", "email.mime.multipart import MIMEMultipart from 
email.mime.multipart import MIMEBase from email.utils import", "') password = input('Password: ') to_addr = '<EMAIL>'#input('To: ') smtp_server", "email.utils import parseaddr, formataddr import smtplib def _format_addr(s): name, addr", "smtp_server = 'smtp.163.com'#input('SMTP server: ') # 发送纯文本邮件 # msg =", "') # 发送纯文本邮件 # msg = MIMEText('hello, send by Python...',", "# msg = MIMEText('<html><body><h1>Hello</h1>' + # '<p>send by <a href=\"http://www.python.org\">Python</a>...</p>'", "from_addr) msg['To'] = _format_addr('管理员 <%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……',", "Created on 2016年8月10日 @author: Administrator ''' from email import encoders", "# '</body></html>', 'html', 'utf-8') # 发送带附件的邮件 # 邮件对象: msg =", "import MIMEText from email.mime.multipart import MIMEMultipart from email.mime.multipart import MIMEBase", "添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f: # 设置附件的MIME和文件名,这里是png类型: mime =", "mime = MIMEBase('image', 'png', filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png')", "邮件正文是MIMEText: msg.attach(MIMEText('send with file...', 'plain', 'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg',", "<gh_stars>0 ''' Created on 2016年8月10日 @author: Administrator ''' from email", "MIMEText('<html><body><h1>Hello</h1>' + # '<p>send by <a href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>',", "f: # 设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image', 'png', filename='test.png') # 加上必要的头信息:", "= MIMEBase('image', 'png', filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID',", "formataddr((Header(name, 'utf-8').encode(), addr)) from_addr = '<EMAIL>'#input('From: ') password = input('Password:", "= smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1) server.login(from_addr, 
password) server.sendmail(from_addr, [to_addr], msg.as_string()) server.quit()", "# 添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f: # 设置附件的MIME和文件名,这里是png类型: mime", "'html', 'utf-8') # 发送带附件的邮件 # 邮件对象: msg = MIMEMultipart() msg['From']", "encoders from email.header import Header from email.mime.text import MIMEText from", "import encoders from email.header import Header from email.mime.text import MIMEText", "from email.mime.multipart import MIMEMultipart from email.mime.multipart import MIMEBase from email.utils", "# 设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image', 'png', filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition',", "MIMEBase from email.utils import parseaddr, formataddr import smtplib def _format_addr(s):", "# '<p>send by <a href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>', 'html', 'utf-8')", "= '<EMAIL>'#input('From: ') password = input('Password: ') to_addr = '<EMAIL>'#input('To:", "# 发送纯文本邮件 # msg = MIMEText('hello, send by Python...', 'plain',", "设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image', 'png', filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment',", "from email import encoders from email.header import Header from email.mime.text", "input('Password: ') to_addr = '<EMAIL>'#input('To: ') smtp_server = 'smtp.163.com'#input('SMTP server:", "def _format_addr(s): name, addr = parseaddr(s) return formataddr((Header(name, 'utf-8').encode(), addr))", "2016年8月10日 @author: Administrator ''' from email import encoders from email.header", "= input('Password: ') to_addr = '<EMAIL>'#input('To: ') smtp_server = 'smtp.163.com'#input('SMTP", "import Header from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart", "= parseaddr(s) return formataddr((Header(name, 'utf-8').encode(), addr)) from_addr = '<EMAIL>'#input('From: ')", "by Python...', 'plain', 'utf-8') # 发送HTML邮件 # msg = 
MIMEText('<html><body><h1>Hello</h1>'", "MIMEBase('image', 'png', filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID', '<0>')", "发送HTML邮件 # msg = MIMEText('<html><body><h1>Hello</h1>' + # '<p>send by <a", "file...', 'plain', 'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f:", "'rb') as f: # 设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image', 'png', filename='test.png')", "= _format_addr('管理员 <%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() #", "Administrator ''' from email import encoders from email.header import Header", "from email.header import Header from email.mime.text import MIMEText from email.mime.multipart", "MIMEMultipart from email.mime.multipart import MIMEBase from email.utils import parseaddr, formataddr", "email import encoders from email.header import Header from email.mime.text import", "from_addr = '<EMAIL>'#input('From: ') password = input('Password: ') to_addr =", "filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0')", "'utf-8') # 发送带附件的邮件 # 邮件对象: msg = MIMEMultipart() msg['From'] =", "'png', filename='test.png') # 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id',", "server = smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1) server.login(from_addr, password) server.sendmail(from_addr, [to_addr], msg.as_string())", "send by Python...', 'plain', 'utf-8') # 发送HTML邮件 # msg =", "'0') # 把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime)", "# 添加到MIMEMultipart: msg.attach(mime) msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) msg['To']", "name, addr = parseaddr(s) 
return formataddr((Header(name, 'utf-8').encode(), addr)) from_addr =", "mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart:", "% to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send with", "to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send with file...',", "Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send with file...', 'plain', 'utf-8')) #", "= Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send with file...', 'plain', 'utf-8'))", "email.mime.multipart import MIMEBase from email.utils import parseaddr, formataddr import smtplib", "'utf-8').encode(), addr)) from_addr = '<EMAIL>'#input('From: ') password = input('Password: ')", "from email.utils import parseaddr, formataddr import smtplib def _format_addr(s): name,", "邮件对象: msg = MIMEMultipart() msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr)", "msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText: msg.attach(MIMEText('send with file...', 'plain',", "from email.mime.multipart import MIMEBase from email.utils import parseaddr, formataddr import", "import smtplib def _format_addr(s): name, addr = parseaddr(s) return formataddr((Header(name,", "<%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() server = smtplib.SMTP(smtp_server,", "server: ') # 发送纯文本邮件 # msg = MIMEText('hello, send by", "发送带附件的邮件 # 邮件对象: msg = MIMEMultipart() msg['From'] = _format_addr('Python爱好者 <%s>'", "as f: # 设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image', 'png', filename='test.png') #", "'<p>send by <a href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>', 'html', 'utf-8') #", "_format_addr('管理员 <%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() # 邮件正文是MIMEText:", "_format_addr('Python爱好者 
<%s>' % from_addr) msg['To'] = _format_addr('管理员 <%s>' % to_addr)", "encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime) msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr)", "msg = MIMEMultipart() msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) msg['To']", "parseaddr(s) return formataddr((Header(name, 'utf-8').encode(), addr)) from_addr = '<EMAIL>'#input('From: ') password", "= 'smtp.163.com'#input('SMTP server: ') # 发送纯文本邮件 # msg = MIMEText('hello,", "@author: Administrator ''' from email import encoders from email.header import", "mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime)", "msg = MIMEText('hello, send by Python...', 'plain', 'utf-8') # 发送HTML邮件", "') smtp_server = 'smtp.163.com'#input('SMTP server: ') # 发送纯文本邮件 # msg", "smtplib def _format_addr(s): name, addr = parseaddr(s) return formataddr((Header(name, 'utf-8').encode(),", "msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() server = smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1) server.login(from_addr,", "addr)) from_addr = '<EMAIL>'#input('From: ') password = input('Password: ') to_addr", "= MIMEText('hello, send by Python...', 'plain', 'utf-8') # 发送HTML邮件 #", "'plain', 'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f: #", "# 发送带附件的邮件 # 邮件对象: msg = MIMEMultipart() msg['From'] = _format_addr('Python爱好者", "'utf-8').encode() server = smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1) server.login(from_addr, password) server.sendmail(from_addr, [to_addr],", "filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码:", "= MIMEMultipart() msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) msg['To'] =", "MIMEMultipart() msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) 
msg['To'] = _format_addr('管理员", "import MIMEBase from email.utils import parseaddr, formataddr import smtplib def", "_format_addr(s): name, addr = parseaddr(s) return formataddr((Header(name, 'utf-8').encode(), addr)) from_addr", "'smtp.163.com'#input('SMTP server: ') # 发送纯文本邮件 # msg = MIMEText('hello, send", "= MIMEText('<html><body><h1>Hello</h1>' + # '<p>send by <a href=\"http://www.python.org\">Python</a>...</p>' + #", "# 邮件对象: msg = MIMEMultipart() msg['From'] = _format_addr('Python爱好者 <%s>' %", "msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) msg['To'] = _format_addr('管理员 <%s>'", "formataddr import smtplib def _format_addr(s): name, addr = parseaddr(s) return", "# 把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime) # 添加到MIMEMultipart: msg.attach(mime) msg['From']", "msg.attach(mime) msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr) msg['To'] = _format_addr('管理员", "parseaddr, formataddr import smtplib def _format_addr(s): name, addr = parseaddr(s)", "''' Created on 2016年8月10日 @author: Administrator ''' from email import", "from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart from email.mime.multipart", "addr = parseaddr(s) return formataddr((Header(name, 'utf-8').encode(), addr)) from_addr = '<EMAIL>'#input('From:", "MIMEText from email.mime.multipart import MIMEMultipart from email.mime.multipart import MIMEBase from", "') to_addr = '<EMAIL>'#input('To: ') smtp_server = 'smtp.163.com'#input('SMTP server: ')", "= _format_addr('管理员 <%s>' % to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() server", "= '<EMAIL>'#input('To: ') smtp_server = 'smtp.163.com'#input('SMTP server: ') # 发送纯文本邮件", "<a href=\"http://www.python.org\">Python</a>...</p>' + # '</body></html>', 'html', 'utf-8') # 发送带附件的邮件 #", "msg = MIMEText('<html><body><h1>Hello</h1>' + # '<p>send by <a href=\"http://www.python.org\">Python</a>...</p>' +", "msg.attach(MIMEText('send with file...', 'plain', 'utf-8')) # 
添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb')", "password = input('Password: ') to_addr = '<EMAIL>'#input('To: ') smtp_server =", "Python...', 'plain', 'utf-8') # 发送HTML邮件 # msg = MIMEText('<html><body><h1>Hello</h1>' +", "= Header('来自SMTP的问候……', 'utf-8').encode() server = smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1) server.login(from_addr, password)", "Header('来自SMTP的问候……', 'utf-8').encode() server = smtplib.SMTP(smtp_server, 25) server.set_debuglevel(1) server.login(from_addr, password) server.sendmail(from_addr,", "'<0>') mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来: mime.set_payload(f.read()) # 用Base64编码: encoders.encode_base64(mime) #", "Header from email.mime.text import MIMEText from email.mime.multipart import MIMEMultipart from", "# 邮件正文是MIMEText: msg.attach(MIMEText('send with file...', 'plain', 'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片: with", "# msg = MIMEText('hello, send by Python...', 'plain', 'utf-8') #", "with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f: # 设置附件的MIME和文件名,这里是png类型: mime = MIMEBase('image',", "% to_addr) msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode() server = smtplib.SMTP(smtp_server, 25)", "# 加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') #", "加上必要的头信息: mime.add_header('Content-Disposition', 'attachment', filename='test.png') mime.add_header('Content-ID', '<0>') mime.add_header('X-Attachment-Id', '0') # 把附件的内容读进来:", "'utf-8')) # 添加附件就是加上一个MIMEBase,从本地读取一个图片: with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f: # 设置附件的MIME和文件名,这里是png类型:" ]
[ "queue system installed. queue_resources = QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5", "a HPC resources with the LSF queue system installed. queue_resources", "\"module load cuda/10.1\"] calculation_backend = DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\",", "SmirnoffForceFieldSource.from_path(force_field_path) # Load in the test set. data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\")", "client = EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ = client.request_estimate( property_set=data_set, force_field_source=force_field_source, )", "= \"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) # Load in the test", "Load in the test set. data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set", "working_directory=working_directory, port=8004, ) with server: # Request the estimates. client", ") # Wait for the results. results, _ = request.results(True,", "the calculations on. 
This assume running # on a HPC", "on a HPC resources with the LSF queue system installed.", "import QueueWorkerResources from evaluator.backends.dask import DaskLSFBackend from evaluator.client import ConnectionOptions,", "PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up a server object to run the", "with calculation_backend: server = EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004, ) with", "per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands = [\"conda activate forcebalance\",", "QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands", "\"working_directory\" # Set up a backend to run the calculations", ") with calculation_backend: server = EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004, )", "LSF queue system installed. queue_resources = QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA,", "Set up a backend to run the calculations on. This", "# Set up a server object to run the calculations", "the results. results, _ = request.results(True, 5) results.json(f\"results.json\") if __name__", "setup_timestamp_logging def main(): setup_timestamp_logging() # Load in the force field", "def main(): setup_timestamp_logging() # Load in the force field force_field_path", "= EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ = client.request_estimate( property_set=data_set, force_field_source=force_field_source, ) #", "with the LSF queue system installed. queue_resources = QueueWorkerResources( number_of_threads=1,", "up a backend to run the calculations on. 
This assume", "unit from evaluator.backends import QueueWorkerResources from evaluator.backends.dask import DaskLSFBackend from", "cuda/10.1\"] calculation_backend = DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\",", "# Request the estimates. client = EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ =", "the LSF queue system installed. queue_resources = QueueWorkerResources( number_of_threads=1, number_of_gpus=1,", "setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with calculation_backend: server = EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory,", "assume running # on a HPC resources with the LSF", "= SmirnoffForceFieldSource.from_path(force_field_path) # Load in the test set. data_set =", "in the test set. data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up", "evaluator.server import EvaluatorServer from evaluator.utils import setup_timestamp_logging def main(): setup_timestamp_logging()", "DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with calculation_backend:", "to run the calculations using. working_directory = \"working_directory\" # Set", "number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands = [\"conda", "[\"conda activate forcebalance\", \"module load cuda/10.1\"] calculation_backend = DaskLSFBackend( minimum_number_of_workers=1,", "Wait for the results. 
results, _ = request.results(True, 5) results.json(f\"results.json\")", "from evaluator.client import ConnectionOptions, EvaluatorClient from evaluator.datasets import PhysicalPropertyDataSet from", "= DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with", "port=8004, ) with server: # Request the estimates. client =", "calculations using. working_directory = \"working_directory\" # Set up a backend", "worker_script_commands = [\"conda activate forcebalance\", \"module load cuda/10.1\"] calculation_backend =", "from evaluator.forcefield import SmirnoffForceFieldSource from evaluator.server import EvaluatorServer from evaluator.utils", "* unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands = [\"conda activate forcebalance\", \"module", "Request the estimates. client = EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ = client.request_estimate(", "= [\"conda activate forcebalance\", \"module load cuda/10.1\"] calculation_backend = DaskLSFBackend(", "from evaluator import unit from evaluator.backends import QueueWorkerResources from evaluator.backends.dask", "from evaluator.backends import QueueWorkerResources from evaluator.backends.dask import DaskLSFBackend from evaluator.client", "run the calculations using. working_directory = \"working_directory\" # Set up", "# Wait for the results. results, _ = request.results(True, 5)", "evaluator import unit from evaluator.backends import QueueWorkerResources from evaluator.backends.dask import", "set. data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up a server object", "from evaluator.backends.dask import DaskLSFBackend from evaluator.client import ConnectionOptions, EvaluatorClient from", "= \"working_directory\" # Set up a backend to run the", "calculations on. 
This assume running # on a HPC resources", "EvaluatorClient from evaluator.datasets import PhysicalPropertyDataSet from evaluator.forcefield import SmirnoffForceFieldSource from", "to run the calculations on. This assume running # on", "a backend to run the calculations on. This assume running", "wallclock_time_limit=\"05:59\", ) worker_script_commands = [\"conda activate forcebalance\", \"module load cuda/10.1\"]", "minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with calculation_backend: server", "evaluator.datasets import PhysicalPropertyDataSet from evaluator.forcefield import SmirnoffForceFieldSource from evaluator.server import", "QueueWorkerResources from evaluator.backends.dask import DaskLSFBackend from evaluator.client import ConnectionOptions, EvaluatorClient", "DaskLSFBackend from evaluator.client import ConnectionOptions, EvaluatorClient from evaluator.datasets import PhysicalPropertyDataSet", "client.request_estimate( property_set=data_set, force_field_source=force_field_source, ) # Wait for the results. results,", "import SmirnoffForceFieldSource from evaluator.server import EvaluatorServer from evaluator.utils import setup_timestamp_logging", "on. This assume running # on a HPC resources with", "server = EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004, ) with server: #", ") with server: # Request the estimates. client = EvaluatorClient(ConnectionOptions(server_port=8004))", "import EvaluatorServer from evaluator.utils import setup_timestamp_logging def main(): setup_timestamp_logging() #", "request, _ = client.request_estimate( property_set=data_set, force_field_source=force_field_source, ) # Wait for", "with server: # Request the estimates. 
client = EvaluatorClient(ConnectionOptions(server_port=8004)) request,", "= QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\", )", "server object to run the calculations using. working_directory = \"working_directory\"", "= client.request_estimate( property_set=data_set, force_field_source=force_field_source, ) # Wait for the results.", "queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with calculation_backend: server = EvaluatorServer( calculation_backend=calculation_backend,", "EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ = client.request_estimate( property_set=data_set, force_field_source=force_field_source, ) # Wait", "\"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) # Load in the test set.", "adaptive_interval=\"1000ms\", ) with calculation_backend: server = EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004,", "= PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up a server object to run", "a server object to run the calculations using. working_directory =", "Set up a server object to run the calculations using.", "main(): setup_timestamp_logging() # Load in the force field force_field_path =", "installed. queue_resources = QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte,", "using. working_directory = \"working_directory\" # Set up a backend to", "for the results. results, _ = request.results(True, 5) results.json(f\"results.json\") if", "# Load in the test set. 
data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") #", "evaluator.utils import setup_timestamp_logging def main(): setup_timestamp_logging() # Load in the", "from evaluator.server import EvaluatorServer from evaluator.utils import setup_timestamp_logging def main():", "the test set. data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up a", "maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with calculation_backend: server =", "resources with the LSF queue system installed. queue_resources = QueueWorkerResources(", "SmirnoffForceFieldSource from evaluator.server import EvaluatorServer from evaluator.utils import setup_timestamp_logging def", "# Load in the force field force_field_path = \"openff-1.0.0.offxml\" force_field_source", "calculation_backend = DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", )", "_ = request.results(True, 5) results.json(f\"results.json\") if __name__ == \"__main__\": main()", "activate forcebalance\", \"module load cuda/10.1\"] calculation_backend = DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50,", "_ = client.request_estimate( property_set=data_set, force_field_source=force_field_source, ) # Wait for the", "force_field_path = \"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) # Load in the", "server: # Request the estimates. 
client = EvaluatorClient(ConnectionOptions(server_port=8004)) request, _", "load cuda/10.1\"] calculation_backend = DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands,", "unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands = [\"conda activate forcebalance\", \"module load", "evaluator.backends.dask import DaskLSFBackend from evaluator.client import ConnectionOptions, EvaluatorClient from evaluator.datasets", "import PhysicalPropertyDataSet from evaluator.forcefield import SmirnoffForceFieldSource from evaluator.server import EvaluatorServer", "data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up a server object to", "EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004, ) with server: # Request the", "= EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004, ) with server: # Request", "from evaluator.datasets import PhysicalPropertyDataSet from evaluator.forcefield import SmirnoffForceFieldSource from evaluator.server", "Load in the force field force_field_path = \"openff-1.0.0.offxml\" force_field_source =", "up a server object to run the calculations using. 
working_directory", "the force field force_field_path = \"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) #", "# on a HPC resources with the LSF queue system", "ConnectionOptions, EvaluatorClient from evaluator.datasets import PhysicalPropertyDataSet from evaluator.forcefield import SmirnoffForceFieldSource", "queue_resources = QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\",", "<filename>studies/mixture_feasibility/parsley_benchmark/alcohol_ester/run.py<gh_stars>1-10 from evaluator import unit from evaluator.backends import QueueWorkerResources from", "evaluator.backends import QueueWorkerResources from evaluator.backends.dask import DaskLSFBackend from evaluator.client import", "setup_timestamp_logging() # Load in the force field force_field_path = \"openff-1.0.0.offxml\"", "system installed. queue_resources = QueueWorkerResources( number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 *", "evaluator.forcefield import SmirnoffForceFieldSource from evaluator.server import EvaluatorServer from evaluator.utils import", "preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands = [\"conda activate", "estimates. client = EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ = client.request_estimate( property_set=data_set, force_field_source=force_field_source,", "results. 
results, _ = request.results(True, 5) results.json(f\"results.json\") if __name__ ==", "import setup_timestamp_logging def main(): setup_timestamp_logging() # Load in the force", "field force_field_path = \"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) # Load in", "resources_per_worker=queue_resources, queue_name=\"gpuqueue\", setup_script_commands=worker_script_commands, adaptive_interval=\"1000ms\", ) with calculation_backend: server = EvaluatorServer(", "the calculations using. working_directory = \"working_directory\" # Set up a", "calculation_backend: server = EvaluatorServer( calculation_backend=calculation_backend, working_directory=working_directory, port=8004, ) with server:", "force field force_field_path = \"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) # Load", "import ConnectionOptions, EvaluatorClient from evaluator.datasets import PhysicalPropertyDataSet from evaluator.forcefield import", "force_field_source = SmirnoffForceFieldSource.from_path(force_field_path) # Load in the test set. data_set", "# Set up a backend to run the calculations on.", "HPC resources with the LSF queue system installed. queue_resources =", ") worker_script_commands = [\"conda activate forcebalance\", \"module load cuda/10.1\"] calculation_backend", "PhysicalPropertyDataSet from evaluator.forcefield import SmirnoffForceFieldSource from evaluator.server import EvaluatorServer from", "This assume running # on a HPC resources with the", "the estimates. 
client = EvaluatorClient(ConnectionOptions(server_port=8004)) request, _ = client.request_estimate( property_set=data_set,", "from evaluator.utils import setup_timestamp_logging def main(): setup_timestamp_logging() # Load in", "forcebalance\", \"module load cuda/10.1\"] calculation_backend = DaskLSFBackend( minimum_number_of_workers=1, maximum_number_of_workers=50, resources_per_worker=queue_resources,", "running # on a HPC resources with the LSF queue", "EvaluatorServer from evaluator.utils import setup_timestamp_logging def main(): setup_timestamp_logging() # Load", "object to run the calculations using. working_directory = \"working_directory\" #", "import DaskLSFBackend from evaluator.client import ConnectionOptions, EvaluatorClient from evaluator.datasets import", "test set. data_set = PhysicalPropertyDataSet.from_json(\"full_set.json\") # Set up a server", "number_of_threads=1, number_of_gpus=1, preferred_gpu_toolkit=QueueWorkerResources.GPUToolkit.CUDA, per_thread_memory_limit=5 * unit.gigabyte, wallclock_time_limit=\"05:59\", ) worker_script_commands =", "working_directory = \"working_directory\" # Set up a backend to run", "calculation_backend=calculation_backend, working_directory=working_directory, port=8004, ) with server: # Request the estimates.", "results, _ = request.results(True, 5) results.json(f\"results.json\") if __name__ == \"__main__\":", "import unit from evaluator.backends import QueueWorkerResources from evaluator.backends.dask import DaskLSFBackend", "in the force field force_field_path = \"openff-1.0.0.offxml\" force_field_source = SmirnoffForceFieldSource.from_path(force_field_path)", "force_field_source=force_field_source, ) # Wait for the results. results, _ =", "evaluator.client import ConnectionOptions, EvaluatorClient from evaluator.datasets import PhysicalPropertyDataSet from evaluator.forcefield", "backend to run the calculations on. 
This assume running #", "property_set=data_set, force_field_source=force_field_source, ) # Wait for the results. results, _", "run the calculations on. This assume running # on a" ]
[ "repeat runtime test for _ in range(self.n_repeat_trials): observation = IDMAgents(", "iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration, buffer) profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True, color=True))", "next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step + 1) buffer =", "import SimulationIteration logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\" Profiling", "many times to repeat runtime test for _ in range(self.n_repeat_trials):", "= 1 self.display_results = True self.scenario = get_test_nuplan_scenario() def test_profile_idm_agent_observation(self)", "= IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario, ) for", "- iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration, buffer) profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True,", "accel_max=1.0, decel_max=2.0, scenario=self.scenario, ) for step in range(self.scenario.get_number_of_iterations() - 1):", "nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import", "import logging import unittest from pyinstrument import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils", "= logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\" Profiling test for IDM", "+ 1) buffer = SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s -", "1), index=step + 1) 
buffer = SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)],", "- 1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step +", "import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration", "\"\"\" def setUp(self) -> None: \"\"\" Inherited, see super class.", "for _ in range(self.n_repeat_trials): observation = IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5,", "1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration, buffer)", "IDMAgents.\"\"\" profiler = Profiler(interval=0.0001) profiler.start() # How many times to", "index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step + 1) buffer", "nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\"", "\"\"\"Profile IDMAgents.\"\"\" profiler = Profiler(interval=0.0001) profiler.start() # How many times", "profiler = Profiler(interval=0.0001) profiler.start() # How many times to repeat", "How many times to repeat runtime test for _ in", "\"\"\" Inherited, see super class. 
\"\"\" self.n_repeat_trials = 1 self.display_results", "for step in range(self.scenario.get_number_of_iterations() - 1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step)", "nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import", "from pyinstrument import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer", "Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from", ") observation.update_observation(iteration, next_iteration, buffer) profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True, color=True)) if", "SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step + 1)", "min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario, ) for step in range(self.scenario.get_number_of_iterations()", "observation.update_observation(iteration, next_iteration, buffer) profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True, color=True)) if __name__", "headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario, ) for step in range(self.scenario.get_number_of_iterations() -", "step in range(self.scenario.get_number_of_iterations() - 1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration", "= SimulationHistoryBuffer.initialize_from_list( 1, 
[self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s, ) observation.update_observation(iteration,", "\"\"\" Profiling test for IDM agents. \"\"\" def setUp(self) ->", "from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase):", "def setUp(self) -> None: \"\"\" Inherited, see super class. \"\"\"", "to repeat runtime test for _ in range(self.n_repeat_trials): observation =", "IDM agents. \"\"\" def setUp(self) -> None: \"\"\" Inherited, see", "buffer = SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s, )", "index=step + 1) buffer = SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s", "logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\" Profiling test for IDM agents. 
\"\"\"", "target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario, ) for step in", "Profiler(interval=0.0001) profiler.start() # How many times to repeat runtime test", "logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\" Profiling test for", "get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) -> None: \"\"\"Profile IDMAgents.\"\"\" profiler = Profiler(interval=0.0001)", "True self.scenario = get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) -> None: \"\"\"Profile IDMAgents.\"\"\"", "from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration", "SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step + 1) buffer = SimulationHistoryBuffer.initialize_from_list( 1,", "import IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO)", "buffer) profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True, color=True)) if __name__ == \"__main__\":", "1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step + 1),", "# How many times to repeat runtime test for _", "= True self.scenario = get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) -> None: \"\"\"Profile", "scenario=self.scenario, ) for step in range(self.scenario.get_number_of_iterations() - 1): iteration =", "nuplan.planning.simulation.observation.idm_agents import IDMAgents from 
nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger = logging.getLogger(__name__)", "-> None: \"\"\"Profile IDMAgents.\"\"\" profiler = Profiler(interval=0.0001) profiler.start() # How", "None: \"\"\"Profile IDMAgents.\"\"\" profiler = Profiler(interval=0.0001) profiler.start() # How many", "[self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration, buffer) profiler.stop()", "self.scenario = get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) -> None: \"\"\"Profile IDMAgents.\"\"\" profiler", "test for IDM agents. \"\"\" def setUp(self) -> None: \"\"\"", "iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step", "= SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step + 1) buffer = SimulationHistoryBuffer.initialize_from_list(", "setUp(self) -> None: \"\"\" Inherited, see super class. 
\"\"\" self.n_repeat_trials", "pyinstrument import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import", "decel_max=2.0, scenario=self.scenario, ) for step in range(self.scenario.get_number_of_iterations() - 1): iteration", "1) buffer = SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s,", "next_iteration, buffer) profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True, color=True)) if __name__ ==", "SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)], [self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration,", "self.display_results = True self.scenario = get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) -> None:", "class TestProfileIDM(unittest.TestCase): \"\"\" Profiling test for IDM agents. \"\"\" def", "next_iteration.time_point.time_s - iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration, buffer) profiler.stop() if self.display_results:", "TestProfileIDM(unittest.TestCase): \"\"\" Profiling test for IDM agents. \"\"\" def setUp(self)", "in range(self.scenario.get_number_of_iterations() - 1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration =", "-> None: \"\"\" Inherited, see super class. 
\"\"\" self.n_repeat_trials =", "= Profiler(interval=0.0001) profiler.start() # How many times to repeat runtime", "_ in range(self.n_repeat_trials): observation = IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0,", "self.n_repeat_trials = 1 self.display_results = True self.scenario = get_test_nuplan_scenario() def", "import unittest from pyinstrument import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario", "import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import IDMAgents", "profiler.stop() if self.display_results: logger.info(profiler.output_text(unicode=True, color=True)) if __name__ == \"__main__\": unittest.main()", "in range(self.n_repeat_trials): observation = IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0,", ") for step in range(self.scenario.get_number_of_iterations() - 1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step),", "see super class. \"\"\" self.n_repeat_trials = 1 self.display_results = True", "test for _ in range(self.n_repeat_trials): observation = IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5,", "[self.scenario.get_tracked_objects_at_iteration(step)], next_iteration.time_point.time_s - iteration.time_point.time_s, ) observation.update_observation(iteration, next_iteration, buffer) profiler.stop() if", "test_profile_idm_agent_observation(self) -> None: \"\"\"Profile IDMAgents.\"\"\" profiler = Profiler(interval=0.0001) profiler.start() #", "agents. 
\"\"\" def setUp(self) -> None: \"\"\" Inherited, see super", "+ 1), index=step + 1) buffer = SimulationHistoryBuffer.initialize_from_list( 1, [self.scenario.get_ego_state_at_iteration(step)],", "SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger", "runtime test for _ in range(self.n_repeat_trials): observation = IDMAgents( target_velocity=10,", "def test_profile_idm_agent_observation(self) -> None: \"\"\"Profile IDMAgents.\"\"\" profiler = Profiler(interval=0.0001) profiler.start()", "from nuplan.planning.simulation.observation.idm_agents import IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger =", "class. \"\"\" self.n_repeat_trials = 1 self.display_results = True self.scenario =", "observation = IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario, )", "times to repeat runtime test for _ in range(self.n_repeat_trials): observation", "logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\" Profiling test for IDM agents.", "unittest from pyinstrument import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from", "profiler.start() # How many times to repeat runtime test for", "SimulationIteration logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class TestProfileIDM(unittest.TestCase): \"\"\" Profiling test", "from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents", "Inherited, see super class. 
\"\"\" self.n_repeat_trials = 1 self.display_results =", "\"\"\" self.n_repeat_trials = 1 self.display_results = True self.scenario = get_test_nuplan_scenario()", "1 self.display_results = True self.scenario = get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) ->", "range(self.scenario.get_number_of_iterations() - 1): iteration = SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step", "= SimulationIteration(time_point=self.scenario.get_time_point(step), index=step) next_iteration = SimulationIteration(time_point=self.scenario.get_time_point(step + 1), index=step +", "IDMAgents from nuplan.planning.simulation.simulation_time_controller.simulation_iteration import SimulationIteration logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO) class", "for IDM agents. \"\"\" def setUp(self) -> None: \"\"\" Inherited,", "IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario, ) for step", "logging import unittest from pyinstrument import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import", "= get_test_nuplan_scenario() def test_profile_idm_agent_observation(self) -> None: \"\"\"Profile IDMAgents.\"\"\" profiler =", "None: \"\"\" Inherited, see super class. 
\"\"\" self.n_repeat_trials = 1", "get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer from nuplan.planning.simulation.observation.idm_agents import IDMAgents from", "range(self.n_repeat_trials): observation = IDMAgents( target_velocity=10, min_gap_to_lead_agent=0.5, headway_time=1.5, accel_max=1.0, decel_max=2.0, scenario=self.scenario,", "import Profiler from nuplan.planning.scenario_builder.nuplan_db.test.nuplan_scenario_test_utils import get_test_nuplan_scenario from nuplan.planning.simulation.history.simulation_history_buffer import SimulationHistoryBuffer", "Profiling test for IDM agents. \"\"\" def setUp(self) -> None:", "super class. \"\"\" self.n_repeat_trials = 1 self.display_results = True self.scenario" ]
[ "tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint))", "in range(nb_rollout_steps): # Predict next action. action, q, _, _", "rank == 0 and logdir: if hasattr(env, 'get_state'): with open(os.path.join(logdir,", "numpy as np try: from mpi4py import MPI except ImportError:", "= [] eval_qs = [] if eval_env is not None:", "* np.ones(nb_actions)) else: raise RuntimeError('unknown noise type \"{}\"'.format(current_noise_type)) max_action =", "= osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None)", "= agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action, q = action.numpy(), q.numpy() #", ": v / mpi_size for (k,v) in zip(combined_stats.keys(), combined_stats_sums)} #", "epoch_adaptive_distances.append(distance) cl, al = agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() # Evaluate.", "obs = env.reset() if eval_env is not None: eval_obs =", "ValueError('expected scalar, got %s'%x) combined_stats_sums = np.array([ np.array(x).flatten()[0] for x", "env, seed=None, total_timesteps=None, nb_epochs=None, # with default settings, perform 1M", "osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint)", "[] epoch_actions = [] epoch_qs = [] epoch_episodes = 0", "if done[d]: # Episode done. 
epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d] =", "\"{}\"'.format(current_noise_type)) max_action = env.action_space.high logger.info('scaling actions by {} before executing", "is not None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k :", "= np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean'] =", "AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal' in current_noise_type: _, stddev = current_noise_type.split('_')", "impossible to reset agent at the end of the episode", "= 0.0 if MPI is not None: mpi_size = MPI.COMM_WORLD.Get_size()", "from baselines.ddpg.ddpg_learner import DDPG from baselines.ddpg.models import Actor, Critic from", "if rank == 0 and render: env.render() episode_reward += r", "return x[0] elif np.isscalar(x): return x else: raise ValueError('expected scalar,", "range(nb_epoch_cycles): # Perform rollouts. 
if nenvs > 1: # if", "Actor, Critic from baselines.ddpg.memory import Memory from baselines.ddpg.noise import AdaptiveParamNoiseSpec,", "these outputs are batched from vecenv t += 1 if", "Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise = None param_noise = None", "apply_noise=True, compute_Q=True) action, q = action.numpy(), q.numpy() # Execute next", "= np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration'] =", "= eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval, dtype = np.float32) for t_rollout", "set_global_seeds from baselines import logger import tensorflow as tf import", "time from collections import deque import pickle from baselines.ddpg.ddpg_learner import", "nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3,", "nenvs_eval = eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval, dtype = np.float32) for", "= [] if eval_env is not None: nenvs_eval = eval_obs.shape[0]", "0 start_time = time.time() epoch_episode_rewards = [] epoch_episode_steps = []", "= [] epoch_episodes = 0 for epoch in range(nb_epochs): for", "= 1 # Log stats. 
# XXX shouldn't call np.mean", "Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic = Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor", "f: pickle.dump(env.get_state(), f) if eval_env and hasattr(eval_env, 'get_state'): with open(os.path.join(logdir,", "open(os.path.join(logdir, 'env_state.pkl'), 'wb') as f: pickle.dump(env.get_state(), f) if eval_env and", "param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal' in current_noise_type: _, stddev", "import DDPG from baselines.ddpg.models import Actor, Critic from baselines.ddpg.memory import", "np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances)", "from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history = deque(maxlen=100) # Prepare", "= np.zeros(nenvs, dtype = np.float32) #vector episode_step = np.zeros(nenvs, dtype", "assert x.size == 1 return x[0] elif np.isscalar(x): return x", "with open(os.path.join(logdir, 'env_state.pkl'), 'wb') as f: pickle.dump(env.get_state(), f) if eval_env", "eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval, dtype = np.float32) for t_rollout in", "append. obs = new_obs for d in range(len(done)): if done[d]:", "in range(nb_eval_steps): eval_action, eval_q, _, _ = agent.step(eval_obs, apply_noise=False, compute_Q=True)", "epoch_actor_losses = [] epoch_critic_losses = [] epoch_adaptive_distances = [] for", "== 0: batch = agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0']) distance =", "episode_reward += r episode_step += 1 # Book-keeping. epoch_actions.append(action) epoch_qs.append(q)", "= 0. 
episode_step[d] = 0 epoch_episodes += 1 episodes +=", "rank == 0 and render: env.render() episode_reward += r episode_step", "np.array([ np.array(x).flatten()[0] for x in combined_stats.values()]) if MPI is not", "cycle in range(nb_epoch_cycles): # Perform rollouts. if nenvs > 1:", "per MPI worker tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed) if", "import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common import set_global_seeds from baselines", "nb_epochs = int(total_timesteps) // (nb_epoch_cycles * nb_rollout_steps) else: nb_epochs =", "env.action_space.high).all() # we assume symmetric actions. memory = Memory(limit=int(1e6), action_shape=env.action_space.shape,", "if MPI is not None: rank = MPI.COMM_WORLD.Get_rank() else: rank", "scalar epoch = 0 start_time = time.time() epoch_episode_rewards = []", "eval_episode_rewards = [] eval_qs = [] if eval_env is not", "start_time stats = agent.get_stats() combined_stats = stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards)", "combined_stats['total/epochs'] = epoch + 1 combined_stats['total/steps'] = t for key", "pickle.dump(env.get_state(), f) if eval_env and hasattr(eval_env, 'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'),", "actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50, # per epoch cycle", "rank == 0 and render: env.render() # max_action is of", "np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history)", "mpi_size for (k,v) in zip(combined_stats.keys(), combined_stats_sums)} # Total statistics. 
combined_stats['total/epochs']", "NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) elif 'ou' in current_noise_type: _, stddev", "MPI = None def learn(network, env, seed=None, total_timesteps=None, nb_epochs=None, #", "combined_stats.values()]) if MPI is not None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats", "action, q = action.numpy(), q.numpy() # Execute next action. if", "on variable length lists duration = time.time() - start_time stats", "# Log stats. # XXX shouldn't call np.mean on variable", "NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common import set_global_seeds from baselines import logger", "= time.time() epoch_episode_rewards = [] epoch_episode_steps = [] epoch_actions =", "batch_size=64, # per MPI worker tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs):", "(np.abs(env.action_space.low) == env.action_space.high).all() # we assume symmetric actions. memory =", "float(duration) combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std'] = np.std(epoch_actions)", "np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs)", "= len(eval_episode_rewards) def as_scalar(x): if isinstance(x, np.ndarray): assert x.size ==", "end of the episode in each # of the environments,", "DDPG is concerned, every action is in [-1, 1]) #", "action is in [-1, 1]) # note these outputs are", "= t for key in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if rank", "= deque(maxlen=100) episode_rewards_history = deque(maxlen=100) # Prepare everything. 
agent.initialize() agent.reset()", "network=network, **network_kwargs) action_noise = None param_noise = None if noise_type", "agent.update_target_net() # Evaluate. eval_episode_rewards = [] eval_qs = [] if", "combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean']", "lists duration = time.time() - start_time stats = agent.get_stats() combined_stats", "is concerned, every action is in [-1, 1]) if render_eval:", "duration combined_stats['total/steps_per_second'] = float(t) / float(duration) combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes']", "epoch_qs = [] epoch_episodes = 0 for epoch in range(nb_epochs):", "settings, perform 1M steps total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False,", "None: load_path = osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt,", "eval_episode_reward[d] = 0.0 if MPI is not None: mpi_size =", "reset agent at the end of the episode in each", "action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic = Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor =", "logger.get_dir() if rank == 0 and logdir: if hasattr(env, 'get_state'):", "# vector episodes = 0 #scalar t = 0 #", "eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0 if MPI is not", "= np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards) def as_scalar(x):", "eval_qs.append(eval_q) for d in range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) 
eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d]", "deque(maxlen=100) episode_rewards_history = deque(maxlen=100) # Prepare everything. agent.initialize() agent.reset() obs", "at the end of the episode in each # of", "parallel, impossible to reset agent at the end of the", "eval_r, eval_done, eval_info = eval_env.step(max_action * eval_action) # scale for", "= 0 nb_actions = env.action_space.shape[-1] assert (np.abs(env.action_space.low) == env.action_space.high).all() #", "'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb') as f: pickle.dump(eval_env.get_state(), f) return", "= np.float32) for t_rollout in range(nb_eval_steps): eval_action, eval_q, _, _", "1M steps total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False,", "for current_noise_type in noise_type.split(','): current_noise_type = current_noise_type.strip() if current_noise_type ==", "= time.time() - start_time stats = agent.get_stats() combined_stats = stats.copy()", "hasattr(env, 'get_state'): with open(os.path.join(logdir, 'env_state.pkl'), 'wb') as f: pickle.dump(env.get_state(), f)", "pickle from baselines.ddpg.ddpg_learner import DDPG from baselines.ddpg.models import Actor, Critic", "clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent with the following configuration:') logger.info(str(agent.__dict__.items())) if", "#the batched data will be unrolled in memory.py's append. obs", "if rank == 0: logger.dump_tabular() logger.info('') logdir = logger.get_dir() if", "q = action.numpy(), q.numpy() # Execute next action. 
if rank", "np.zeros(nenvs_eval, dtype = np.float32) for t_rollout in range(nb_eval_steps): eval_action, eval_q,", "= AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal' in current_noise_type: _, stddev =", "_, _ = agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs, eval_r, eval_done, eval_info", "= float(t) / float(duration) combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes'] = epoch_episodes", "Execute next action. if rank == 0 and render: env.render()", "_, stddev = current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions))", "d in range(len(done)): if done[d]: # Episode done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d])", "= agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al = agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net()", "'normal' in current_noise_type: _, stddev = current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions),", "current_noise_type in noise_type.split(','): current_noise_type = current_noise_type.strip() if current_noise_type == 'none':", "0 and render: env.render() # max_action is of dimension A,", "in [-1, 1]) # note these outputs are batched from", "= deque(maxlen=100) # Prepare everything. 
agent.initialize() agent.reset() obs = env.reset()", "for t_train in range(nb_train_steps): # Adapt param noise, if necessary.", "= episodes combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std'] = np.std(epoch_actions) # Evaluation", "info = env.step(max_action * action) # scale for execution in", "memory, env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise,", "done, info = env.step(max_action * action) # scale for execution", "= current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) else: raise", "epoch_episode_rewards = [] epoch_episode_steps = [] epoch_actions = [] epoch_qs", "eval_env and hasattr(eval_env, 'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb') as f:", "nb_rollout_steps) else: nb_epochs = 500 if MPI is not None:", "= 0 #scalar t = 0 # scalar epoch =", "and logdir: if hasattr(env, 'get_state'): with open(os.path.join(logdir, 'env_state.pkl'), 'wb') as", "pass elif 'adaptive-param' in current_noise_type: _, stddev = current_noise_type.split('_') param_noise", "execution in env (as far as DDPG is concerned, every", "Book-keeping. 
epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action, r, new_obs, done) #the batched", "critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50, # per epoch", "eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed) if total_timesteps is not None:", "if eval_env is not None: nenvs_eval = eval_obs.shape[0] eval_episode_reward =", "= np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] =", "is not None: rank = MPI.COMM_WORLD.Get_rank() else: rank = 0", "# Perform rollouts. if nenvs > 1: # if simulating", "executing in env'.format(max_action)) agent = DDPG(actor, critic, memory, env.observation_space.shape, env.action_space.shape,", "= MPI.COMM_WORLD.Get_size() else: mpi_size = 1 # Log stats. #", "is not None: load_path = osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent) manager", "# Execute next action. if rank == 0 and render:", "epoch_episodes = 0 for epoch in range(nb_epochs): for cycle in", "apply_noise=False, compute_Q=True) eval_obs, eval_r, eval_done, eval_info = eval_env.step(max_action * eval_action)", "nenvs == 1: agent.reset() # Train. 
epoch_actor_losses = [] epoch_critic_losses", "[] if eval_env is not None: nenvs_eval = eval_obs.shape[0] eval_episode_reward", "combined_stats = stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history']", "= env.reset() if eval_env is not None: eval_obs = eval_env.reset()", "action is in [-1, 1]) if render_eval: eval_env.render() eval_episode_reward +=", "MPI worker tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed) if total_timesteps", "== 1 return x[0] elif np.isscalar(x): return x else: raise", "action, r, new_obs, done) #the batched data will be unrolled", "batch = agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance)", "np.float32) for t_rollout in range(nb_eval_steps): eval_action, eval_q, _, _ =", "combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std']", "combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean']", "eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history = deque(maxlen=100) # Prepare everything. agent.initialize()", "+= 1 # Book-keeping. 
epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action, r, new_obs,", "t_train % param_noise_adaption_interval == 0: batch = agent.memory.sample(batch_size=batch_size) obs0 =", "= Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor = Actor(nb_actions, ob_shape=env.observation_space.shape, network=network,", "is not None: nenvs_eval = eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval, dtype", "if eval_env and hasattr(eval_env, 'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb') as", "# Train. epoch_actor_losses = [] epoch_critic_losses = [] epoch_adaptive_distances =", "t_train in range(nb_train_steps): # Adapt param noise, if necessary. if", "= [] epoch_actions = [] epoch_qs = [] epoch_episodes =", "baselines.ddpg.models import Actor, Critic from baselines.ddpg.memory import Memory from baselines.ddpg.noise", "eval_obs, eval_r, eval_done, eval_info = eval_env.step(max_action * eval_action) # scale", "of the episode in each # of the environments, so", "'adaptive-param' in current_noise_type: _, stddev = current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev),", "= env.action_space.high logger.info('scaling actions by {} before executing in env'.format(max_action))", "# scalar epoch = 0 start_time = time.time() epoch_episode_rewards =", "and t_train % param_noise_adaption_interval == 0: batch = agent.memory.sample(batch_size=batch_size) obs0", "None: mpi_size = MPI.COMM_WORLD.Get_size() else: mpi_size = 1 # Log", "variable length lists duration = time.time() - start_time stats =", "= np.zeros(nenvs, dtype = int) # vector episodes = 0", "[] epoch_critic_losses = [] epoch_adaptive_distances = [] for t_train in", "t += 1 if rank == 0 and render: env.render()", "epoch_actor_losses.append(al) agent.update_target_net() # Evaluate. 
eval_episode_rewards = [] eval_qs = []", "combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic']", "import logger import tensorflow as tf import numpy as np", "before executing in env'.format(max_action)) agent = DDPG(actor, critic, memory, env.observation_space.shape,", "following configuration:') logger.info(str(agent.__dict__.items())) if load_path is not None: load_path =", ">= batch_size and t_train % param_noise_adaption_interval == 0: batch =", "every action is in [-1, 1]) if render_eval: eval_env.render() eval_episode_reward", "if memory.nb_entries >= batch_size and t_train % param_noise_adaption_interval == 0:", "with default settings, perform 1M steps total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0,", "None def learn(network, env, seed=None, total_timesteps=None, nb_epochs=None, # with default", "env (as far as DDPG is concerned, every action is", "agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs, eval_r, eval_done, eval_info = eval_env.step(max_action *", "rollouts. 
if nenvs > 1: # if simulating multiple envs", "combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes']", "and hasattr(eval_env, 'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb') as f: pickle.dump(eval_env.get_state(),", "import deque import pickle from baselines.ddpg.ddpg_learner import DDPG from baselines.ddpg.models", "1]) if render_eval: eval_env.render() eval_episode_reward += eval_r eval_qs.append(eval_q) for d", "action is dimension (nenvs, A) - the multiplication gets broadcasted", "broadcasted to the batch new_obs, r, done, info = env.step(max_action", "set_global_seeds(seed) if total_timesteps is not None: assert nb_epochs is None", "len(eval_episode_rewards) def as_scalar(x): if isinstance(x, np.ndarray): assert x.size == 1", "0 and logdir: if hasattr(env, 'get_state'): with open(os.path.join(logdir, 'env_state.pkl'), 'wb')", "= epoch + 1 combined_stats['total/steps'] = t for key in", "episodes = 0 #scalar t = 0 # scalar epoch", "agent.store_transition(obs, action, r, new_obs, done) #the batched data will be", "agent.reset() # Train. epoch_actor_losses = [] epoch_critic_losses = [] epoch_adaptive_distances", "= np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] =", "episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0. 
episode_step[d] = 0 epoch_episodes +=", "[] for t_train in range(nb_train_steps): # Adapt param noise, if", "as np try: from mpi4py import MPI except ImportError: MPI", "new_obs, done) #the batched data will be unrolled in memory.py's", "1 return x[0] elif np.isscalar(x): return x else: raise ValueError('expected", "eval_q, _, _ = agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs, eval_r, eval_done,", "sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if rank == 0: logger.dump_tabular() logger.info('') logdir", "noise_type is not None: for current_noise_type in noise_type.split(','): current_noise_type =", "ckpt = tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring", "import os import os.path as osp import time from collections", "episode_reward = np.zeros(nenvs, dtype = np.float32) #vector episode_step = np.zeros(nenvs,", "#vector episode_step = np.zeros(nenvs, dtype = int) # vector episodes", "done[d]: # Episode done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0.", "epoch_critic_losses = [] epoch_adaptive_distances = [] for t_train in range(nb_train_steps):", "done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0. episode_step[d] = 0", "if isinstance(x, np.ndarray): assert x.size == 1 return x[0] elif", "start_time = time.time() epoch_episode_rewards = [] epoch_episode_steps = [] epoch_actions", "+= 1 if rank == 0 and render: env.render() episode_reward", "assert (np.abs(env.action_space.low) == env.action_space.high).all() # we assume symmetric actions. 
memory", "nenvs = obs.shape[0] episode_reward = np.zeros(nenvs, dtype = np.float32) #vector", "the following configuration:') logger.info(str(agent.__dict__.items())) if load_path is not None: load_path", "import time from collections import deque import pickle from baselines.ddpg.ddpg_learner", "param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed) if total_timesteps is not None: assert", "== env.action_space.high).all() # we assume symmetric actions. memory = Memory(limit=int(1e6),", "env.render() episode_reward += r episode_step += 1 # Book-keeping. epoch_actions.append(action)", "rank = MPI.COMM_WORLD.Get_rank() else: rank = 0 nb_actions = env.action_space.shape[-1]", "is not None: for current_noise_type in noise_type.split(','): current_noise_type = current_noise_type.strip()", "nb_epochs = 500 if MPI is not None: rank =", "seed=None, total_timesteps=None, nb_epochs=None, # with default settings, perform 1M steps", "= agent.get_stats() combined_stats = stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] =", "call np.mean on variable length lists duration = time.time() -", "- start_time stats = agent.get_stats() combined_stats = stats.copy() combined_stats['rollout/return'] =", "if render_eval: eval_env.render() eval_episode_reward += eval_r eval_qs.append(eval_q) for d in", "np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration combined_stats['total/steps_per_second'] = float(t)", "is not None: combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history) combined_stats['eval/Q']", "agent = DDPG(actor, critic, memory, env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns,", "combined_stats['eval/return_history'] = 
np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards) def", "action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) elif 'ou' in current_noise_type:", "print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history = deque(maxlen=100) #", "return x else: raise ValueError('expected scalar, got %s'%x) combined_stats_sums =", "elif np.isscalar(x): return x else: raise ValueError('expected scalar, got %s'%x)", "al = agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() # Evaluate. eval_episode_rewards =", "action.numpy(), q.numpy() # Execute next action. if rank == 0", "0 # scalar epoch = 0 start_time = time.time() epoch_episode_rewards", "= {k : v / mpi_size for (k,v) in zip(combined_stats.keys(),", "logger.dump_tabular() logger.info('') logdir = logger.get_dir() if rank == 0 and", "epoch_episode_steps = [] epoch_actions = [] epoch_qs = [] epoch_episodes", "current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) elif 'ou' in", "== 0 and logdir: if hasattr(env, 'get_state'): with open(os.path.join(logdir, 'env_state.pkl'),", "A) - the multiplication gets broadcasted to the batch new_obs,", "for d in range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] =", "== 0 and render: env.render() episode_reward += r episode_step +=", "worker tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed) if total_timesteps is", "else: mpi_size = 1 # Log stats. 
# XXX shouldn't", "combined_stats['eval/episodes'] = len(eval_episode_rewards) def as_scalar(x): if isinstance(x, np.ndarray): assert x.size", "if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0 if MPI is", "obs0 = tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al =", "# of the environments, so resetting here instead agent.reset() for", "combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps']", "assume symmetric actions. memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic =", "is dimension (nenvs, A) - the multiplication gets broadcasted to", "np.std(epoch_actions) # Evaluation statistics. 
if eval_env is not None: combined_stats['eval/return']", "scalar, got %s'%x) combined_stats_sums = np.array([ np.array(x).flatten()[0] for x in", "not None: eval_obs = eval_env.reset() nenvs = obs.shape[0] episode_reward =", "DDPG from baselines.ddpg.models import Actor, Critic from baselines.ddpg.memory import Memory", "else: nb_epochs = 500 if MPI is not None: rank", "ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor = Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise", "= eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes'] =", "env.reset() if eval_env is not None: eval_obs = eval_env.reset() nenvs", "tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al = agent.train() epoch_critic_losses.append(cl)", "agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() # Evaluate. 
eval_episode_rewards = [] eval_qs", "cycle and MPI worker, nb_eval_steps=100, batch_size=64, # per MPI worker", "combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration combined_stats['total/steps_per_second'] = float(t) /", "normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale)", "osp import time from collections import deque import pickle from", "# with default settings, perform 1M steps total nb_epoch_cycles=20, nb_rollout_steps=100,", "from baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common import set_global_seeds", "= MPI.COMM_WORLD.Get_rank() else: rank = 0 nb_actions = env.action_space.shape[-1] assert", "we assume symmetric actions. memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic", "q.numpy() # Execute next action. 
if rank == 0 and", "worker, nb_eval_steps=100, batch_size=64, # per MPI worker tau=0.01, eval_env=None, param_noise_adaption_interval=50,", "batched from vecenv t += 1 if rank == 0", "0 epoch_episodes += 1 episodes += 1 if nenvs ==", "is in [-1, 1]) if render_eval: eval_env.render() eval_episode_reward += eval_r", "= np.zeros(nenvs_eval, dtype = np.float32) for t_rollout in range(nb_eval_steps): eval_action,", "logger.info('') logdir = logger.get_dir() if rank == 0 and logdir:", "= np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration combined_stats['total/steps_per_second'] = float(t) / float(duration)", "import pickle from baselines.ddpg.ddpg_learner import DDPG from baselines.ddpg.models import Actor,", "# if simulating multiple envs in parallel, impossible to reset", "_ = agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs, eval_r, eval_done, eval_info =", "note these outputs are batched from vecenv t += 1", "stddev = current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) elif", "data will be unrolled in memory.py's append. 
obs = new_obs", "eval_info = eval_env.step(max_action * eval_action) # scale for execution in", "steps total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True,", "= MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k : v / mpi_size for", "combined_stats = {k : v / mpi_size for (k,v) in", "env.step(max_action * action) # scale for execution in env (as", "A, whereas action is dimension (nenvs, A) - the multiplication", "= stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] =", "from vecenv t += 1 if rank == 0 and", "MPI is not None: mpi_size = MPI.COMM_WORLD.Get_size() else: mpi_size =", "in range(nb_epoch_cycles): # Perform rollouts. if nenvs > 1: #", "if eval_env is not None: eval_obs = eval_env.reset() nenvs =", "= agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl,", "logger.info('Using agent with the following configuration:') logger.info(str(agent.__dict__.items())) if load_path is", "combined_stats_sums = np.array([ np.array(x).flatten()[0] for x in combined_stats.values()]) if MPI", "logdir: if hasattr(env, 'get_state'): with open(os.path.join(logdir, 'env_state.pkl'), 'wb') as f:", "= env.action_space.shape[-1] assert (np.abs(env.action_space.low) == env.action_space.high).all() # we assume symmetric", "nb_train_steps=50, # per epoch cycle and MPI worker, nb_eval_steps=100, batch_size=64,", "multiple envs in parallel, impossible to reset agent at the", "eval_env is not None: eval_obs = eval_env.reset() nenvs = obs.shape[0]", "AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common import set_global_seeds from baselines import", "be 
unrolled in memory.py's append. obs = new_obs for d", "and render: env.render() episode_reward += r episode_step += 1 #", "eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0 if MPI is not None:", "Episode done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0. episode_step[d] =", "np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration combined_stats['total/steps_per_second'] = float(t) / float(duration) combined_stats['total/episodes']", "far as DDPG is concerned, every action is in [-1,", "= [] epoch_episode_steps = [] epoch_actions = [] epoch_qs =", "combined_stats['total/duration'] = duration combined_stats['total/steps_per_second'] = float(t) / float(duration) combined_stats['total/episodes'] =", "not None: assert nb_epochs is None nb_epochs = int(total_timesteps) //", "collections import deque import pickle from baselines.ddpg.ddpg_learner import DDPG from", "agent with the following configuration:') logger.info(str(agent.__dict__.items())) if load_path is not", "= tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al = agent.train()", "nb_actions = env.action_space.shape[-1] assert (np.abs(env.action_space.low) == env.action_space.high).all() # we assume", "tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100)", "per epoch cycle and MPI worker, nb_eval_steps=100, batch_size=64, # per", "(nb_epoch_cycles * nb_rollout_steps) else: nb_epochs = 500 if MPI is", "combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std'] = np.std(epoch_actions) #", 
"Total statistics. combined_stats['total/epochs'] = epoch + 1 combined_stats['total/steps'] = t", "np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards) def as_scalar(x): if", "configuration:') logger.info(str(agent.__dict__.items())) if load_path is not None: load_path = osp.expanduser(load_path)", "nenvs > 1: # if simulating multiple envs in parallel,", "{}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history = deque(maxlen=100) # Prepare everything.", "total_timesteps is not None: assert nb_epochs is None nb_epochs =", "epoch_qs.append(q) agent.store_transition(obs, action, r, new_obs, done) #the batched data will", "critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent with the", "MPI worker, nb_eval_steps=100, batch_size=64, # per MPI worker tau=0.01, eval_env=None,", "in noise_type.split(','): current_noise_type = current_noise_type.strip() if current_noise_type == 'none': pass", "# XXX shouldn't call np.mean on variable length lists duration", "combined_stats['total/steps'] = t for key in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if", "desired_action_stddev=float(stddev)) elif 'normal' in current_noise_type: _, stddev = current_noise_type.split('_') action_noise", "% param_noise_adaption_interval == 0: batch = agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0'])", "= 0 start_time = time.time() epoch_episode_rewards = [] epoch_episode_steps =", "1 episodes += 1 if nenvs == 1: agent.reset() #", "else: raise RuntimeError('unknown noise type \"{}\"'.format(current_noise_type)) max_action = env.action_space.high logger.info('scaling", "and render: env.render() # max_action is of dimension A, whereas", "batch new_obs, r, done, info = env.step(max_action * action) 
#", "from baselines import logger import tensorflow as tf import numpy", "the environments, so resetting here instead agent.reset() for t_rollout in", "[] epoch_episodes = 0 for epoch in range(nb_epochs): for cycle", "is of dimension A, whereas action is dimension (nenvs, A)", "new_obs, r, done, info = env.step(max_action * action) # scale", "MPI.COMM_WORLD.Get_size() else: mpi_size = 1 # Log stats. # XXX", "stats. # XXX shouldn't call np.mean on variable length lists", "r episode_step += 1 # Book-keeping. epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action,", "type \"{}\"'.format(current_noise_type)) max_action = env.action_space.high logger.info('scaling actions by {} before", "memory.nb_entries >= batch_size and t_train % param_noise_adaption_interval == 0: batch", "as_scalar(x): if isinstance(x, np.ndarray): assert x.size == 1 return x[0]", "= env.step(max_action * action) # scale for execution in env", "None: nenvs_eval = eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval, dtype = np.float32)", "= agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() # Evaluate. eval_episode_rewards = []", "perform 1M steps total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2',", "reward_scale=reward_scale) logger.info('Using agent with the following configuration:') logger.info(str(agent.__dict__.items())) if load_path", "_ = agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action, q = action.numpy(), q.numpy()", "def learn(network, env, seed=None, total_timesteps=None, nb_epochs=None, # with default settings,", "eval_obs = eval_env.reset() nenvs = obs.shape[0] episode_reward = np.zeros(nenvs, dtype", "1 if nenvs == 1: agent.reset() # Train. epoch_actor_losses =", "in each # of the environments, so resetting here instead", "in zip(combined_stats.keys(), combined_stats_sums)} # Total statistics. 
combined_stats['total/epochs'] = epoch +", "# per epoch cycle and MPI worker, nb_eval_steps=100, batch_size=64, #", "= np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] =", "instead agent.reset() for t_rollout in range(nb_rollout_steps): # Predict next action.", "# Evaluate. eval_episode_rewards = [] eval_qs = [] if eval_env", "except ImportError: MPI = None def learn(network, env, seed=None, total_timesteps=None,", "in range(nb_epochs): for cycle in range(nb_epoch_cycles): # Perform rollouts. if", "if rank == 0 and render: env.render() # max_action is", "OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) else: raise RuntimeError('unknown noise type \"{}\"'.format(current_noise_type))", "= eval_env.reset() nenvs = obs.shape[0] episode_reward = np.zeros(nenvs, dtype =", "for (k,v) in zip(combined_stats.keys(), combined_stats_sums)} # Total statistics. combined_stats['total/epochs'] =", "'ou' in current_noise_type: _, stddev = current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions),", "DDPG(actor, critic, memory, env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size,", "# Episode done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0. 
episode_step[d]", "1]) # note these outputs are batched from vecenv t", "np.ones(nb_actions)) elif 'ou' in current_noise_type: _, stddev = current_noise_type.split('_') action_noise", "np.array(x).flatten()[0] for x in combined_stats.values()]) if MPI is not None:", "[-1, 1]) if render_eval: eval_env.render() eval_episode_reward += eval_r eval_qs.append(eval_q) for", "baselines.ddpg.ddpg_learner import DDPG from baselines.ddpg.models import Actor, Critic from baselines.ddpg.memory", "baselines.ddpg.memory import Memory from baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from", "load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history", "episode_reward[d] = 0. episode_step[d] = 0 epoch_episodes += 1 episodes", "Perform rollouts. if nenvs > 1: # if simulating multiple", "baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common import set_global_seeds from", "ImportError: MPI = None def learn(network, env, seed=None, total_timesteps=None, nb_epochs=None,", "to the batch new_obs, r, done, info = env.step(max_action *", "np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses)", "and MPI worker, nb_eval_steps=100, batch_size=64, # per MPI worker tau=0.01,", "epoch_actions = [] epoch_qs = [] epoch_episodes = 0 for", "done) #the batched data will be unrolled in memory.py's append.", "compute_Q=True) action, q = action.numpy(), q.numpy() # Execute next action.", "render: env.render() episode_reward += r episode_step += 1 # Book-keeping.", "in env'.format(max_action)) agent = DDPG(actor, critic, memory, env.observation_space.shape, env.action_space.shape, 
gamma=gamma,", "duration = time.time() - start_time stats = agent.get_stats() combined_stats =", "not None: load_path = osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent) manager =", "range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0 if MPI", "eval_action, eval_q, _, _ = agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs, eval_r,", "gets broadcasted to the batch new_obs, r, done, info =", "# max_action is of dimension A, whereas action is dimension", "Memory from baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common import", "agent.get_stats() combined_stats = stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards)", "= np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration combined_stats['total/steps_per_second'] =", "statistics. if eval_env is not None: combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history']", "range(nb_train_steps): # Adapt param noise, if necessary. if memory.nb_entries >=", "eval_r eval_qs.append(eval_q) for d in range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d])", "not None: for current_noise_type in noise_type.split(','): current_noise_type = current_noise_type.strip() if", "is not None: eval_obs = eval_env.reset() nenvs = obs.shape[0] episode_reward", "else: rank = 0 nb_actions = env.action_space.shape[-1] assert (np.abs(env.action_space.low) ==", "Evaluate. 
eval_episode_rewards = [] eval_qs = [] if eval_env is", "if total_timesteps is not None: assert nb_epochs is None nb_epochs", "np.ones(nb_actions)) else: raise RuntimeError('unknown noise type \"{}\"'.format(current_noise_type)) max_action = env.action_space.high", "os.path as osp import time from collections import deque import", "eval_env is not None: nenvs_eval = eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval,", "t_rollout in range(nb_eval_steps): eval_action, eval_q, _, _ = agent.step(eval_obs, apply_noise=False,", "in current_noise_type: _, stddev = current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev)", "episode_step = np.zeros(nenvs, dtype = int) # vector episodes =", "if nenvs > 1: # if simulating multiple envs in", "= np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] =", "env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr,", "of the environments, so resetting here instead agent.reset() for t_rollout", "= Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise = None param_noise =", "obs = new_obs for d in range(len(done)): if done[d]: #", "distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al = agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al)", "if rank == 0 and logdir: if hasattr(env, 'get_state'): with", "critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50, # per epoch cycle and", "+= eval_r eval_qs.append(eval_q) for d in range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d])", "= 
new_obs for d in range(len(done)): if done[d]: # Episode", "agent.reset() obs = env.reset() if eval_env is not None: eval_obs", "= NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) elif 'ou' in current_noise_type: _,", "next action. if rank == 0 and render: env.render() #", "batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using", "from mpi4py import MPI except ImportError: MPI = None def", "for x in combined_stats.values()]) if MPI is not None: combined_stats_sums", "raise RuntimeError('unknown noise type \"{}\"'.format(current_noise_type)) max_action = env.action_space.high logger.info('scaling actions", "epoch cycle and MPI worker, nb_eval_steps=100, batch_size=64, # per MPI", "t = 0 # scalar epoch = 0 start_time =", "eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0 if MPI is not None: mpi_size", "np.isscalar(x): return x else: raise ValueError('expected scalar, got %s'%x) combined_stats_sums", "// (nb_epoch_cycles * nb_rollout_steps) else: nb_epochs = 500 if MPI", "the multiplication gets broadcasted to the batch new_obs, r, done,", "not None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k : v", "**network_kwargs) actor = Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise = None", "'get_state'): with open(os.path.join(logdir, 'env_state.pkl'), 'wb') as f: pickle.dump(env.get_state(), f) if", "baselines import logger import tensorflow as tf import numpy as", "normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50, #", "max_action = env.action_space.high logger.info('scaling actions by {} before executing in", 
"None: combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs", "resetting here instead agent.reset() for t_rollout in range(nb_rollout_steps): # Predict", "necessary. if memory.nb_entries >= batch_size and t_train % param_noise_adaption_interval ==", "import os.path as osp import time from collections import deque", "1 # Book-keeping. epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action, r, new_obs, done)", "concerned, every action is in [-1, 1]) if render_eval: eval_env.render()", "by {} before executing in env'.format(max_action)) agent = DDPG(actor, critic,", "for t_rollout in range(nb_rollout_steps): # Predict next action. action, q,", "in current_noise_type: _, stddev = current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev))", "with the following configuration:') logger.info(str(agent.__dict__.items())) if load_path is not None:", "nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4,", "param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent with", "deque(maxlen=100) # Prepare everything. agent.initialize() agent.reset() obs = env.reset() if", "in range(len(done)): if done[d]: # Episode done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d])", "Evaluation statistics. 
if eval_env is not None: combined_stats['eval/return'] = eval_episode_rewards", "OrnsteinUhlenbeckActionNoise from baselines.common import set_global_seeds from baselines import logger import", "tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart,", "if noise_type is not None: for current_noise_type in noise_type.split(','): current_noise_type", "obs.shape[0] episode_reward = np.zeros(nenvs, dtype = np.float32) #vector episode_step =", "= logger.get_dir() if rank == 0 and logdir: if hasattr(env,", "int) # vector episodes = 0 #scalar t = 0", "episode_step += 1 # Book-keeping. epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action, r,", "import MPI except ImportError: MPI = None def learn(network, env,", "current_noise_type == 'none': pass elif 'adaptive-param' in current_noise_type: _, stddev", "everything. agent.initialize() agent.reset() obs = env.reset() if eval_env is not", "symmetric actions. memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic = Critic(nb_actions,", "# note these outputs are batched from vecenv t +=", "== 1: agent.reset() # Train. 
epoch_actor_losses = [] epoch_critic_losses =", "[-1, 1]) # note these outputs are batched from vecenv", "every action is in [-1, 1]) # note these outputs", "MPI.COMM_WORLD.Get_rank() else: rank = 0 nb_actions = env.action_space.shape[-1] assert (np.abs(env.action_space.low)", "0 nb_actions = env.action_space.shape[-1] assert (np.abs(env.action_space.low) == env.action_space.high).all() # we", "/ float(duration) combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std'] =", "critic = Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor = Actor(nb_actions, ob_shape=env.observation_space.shape,", "dtype = np.float32) #vector episode_step = np.zeros(nenvs, dtype = int)", "normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50, # per", "np.float32) #vector episode_step = np.zeros(nenvs, dtype = int) # vector", "0: batch = agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0)", "if nenvs == 1: agent.reset() # Train. epoch_actor_losses = []", "# we assume symmetric actions. memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape)", "'wb') as f: pickle.dump(env.get_state(), f) if eval_env and hasattr(eval_env, 'get_state'):", "agent.reset() for t_rollout in range(nb_rollout_steps): # Predict next action. action,", "env.action_space.shape[-1] assert (np.abs(env.action_space.low) == env.action_space.high).all() # we assume symmetric actions.", "{} before executing in env'.format(max_action)) agent = DDPG(actor, critic, memory,", "combined_stats[key]) if rank == 0: logger.dump_tabular() logger.info('') logdir = logger.get_dir()", "for d in range(len(done)): if done[d]: # Episode done. 
epoch_episode_rewards.append(episode_reward[d])", "+= r episode_step += 1 # Book-keeping. epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs,", "{k : v / mpi_size for (k,v) in zip(combined_stats.keys(), combined_stats_sums)}", "None nb_epochs = int(total_timesteps) // (nb_epoch_cycles * nb_rollout_steps) else: nb_epochs", "critic, memory, env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise,", "total_timesteps=None, nb_epochs=None, # with default settings, perform 1M steps total", "not None: rank = MPI.COMM_WORLD.Get_rank() else: rank = 0 nb_actions", "import tensorflow as tf import numpy as np try: from", "stddev = current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) else:", "in range(nb_train_steps): # Adapt param noise, if necessary. 
if memory.nb_entries", "got %s'%x) combined_stats_sums = np.array([ np.array(x).flatten()[0] for x in combined_stats.values()])", "env'.format(max_action)) agent = DDPG(actor, critic, memory, env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau,", "np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions)", "= agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs, eval_r, eval_done, eval_info = eval_env.step(max_action", "param_noise_adaption_interval == 0: batch = agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0']) distance", "None: rank = MPI.COMM_WORLD.Get_rank() else: rank = 0 nb_actions =", "#scalar t = 0 # scalar epoch = 0 start_time", "eval_done, eval_info = eval_env.step(max_action * eval_action) # scale for execution", "ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise = None param_noise = None if", "new_obs for d in range(len(done)): if done[d]: # Episode done.", "x.size == 1 return x[0] elif np.isscalar(x): return x else:", "500 if MPI is not None: rank = MPI.COMM_WORLD.Get_rank() else:", "load_path is not None: load_path = osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent)", "= None if noise_type is not None: for current_noise_type in", "render_eval: eval_env.render() eval_episode_reward += eval_r eval_qs.append(eval_q) for d in range(len(eval_done)):", "render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99,", "if simulating multiple envs in parallel, impossible to reset agent", "- the multiplication gets broadcasted to the batch new_obs, r,", "= np.array([ np.array(x).flatten()[0] for x in combined_stats.values()]) if MPI 
is", "d in range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0", "r, new_obs, done) #the batched data will be unrolled in", "import Memory from baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise from baselines.common", "the end of the episode in each # of the", "in memory.py's append. obs = new_obs for d in range(len(done)):", "of dimension A, whereas action is dimension (nenvs, A) -", "rank = 0 nb_actions = env.action_space.shape[-1] assert (np.abs(env.action_space.low) == env.action_space.high).all()", "so resetting here instead agent.reset() for t_rollout in range(nb_rollout_steps): #", "* eval_action) # scale for execution in env (as far", "1 combined_stats['total/steps'] = t for key in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key])", "agent at the end of the episode in each #", "default settings, perform 1M steps total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False,", "range(len(done)): if done[d]: # Episode done. epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) epoch_episode_steps.append(episode_step[d]) episode_reward[d]", "current_noise_type.strip() if current_noise_type == 'none': pass elif 'adaptive-param' in current_noise_type:", "Train. 
epoch_actor_losses = [] epoch_critic_losses = [] epoch_adaptive_distances = []", "load_path = osp.expanduser(load_path) ckpt = tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt, load_path,", "import set_global_seeds from baselines import logger import tensorflow as tf", "range(nb_eval_steps): eval_action, eval_q, _, _ = agent.step(eval_obs, apply_noise=False, compute_Q=True) eval_obs,", "combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k : v / mpi_size", "key in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if rank == 0: logger.dump_tabular()", "(nenvs, A) - the multiplication gets broadcasted to the batch", "**network_kwargs) action_noise = None param_noise = None if noise_type is", "eval_env is not None: combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history)", "epoch in range(nb_epochs): for cycle in range(nb_epoch_cycles): # Perform rollouts.", "from collections import deque import pickle from baselines.ddpg.ddpg_learner import DDPG", "# Predict next action. action, q, _, _ = agent.step(tf.constant(obs),", "zip(combined_stats.keys(), combined_stats_sums)} # Total statistics. 
combined_stats['total/epochs'] = epoch + 1", "reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False,", "# scale for execution in env (as far as DDPG", "vecenv t += 1 if rank == 0 and render:", "combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration']", "dimension A, whereas action is dimension (nenvs, A) - the", "0 #scalar t = 0 # scalar epoch = 0", "eval_episode_reward += eval_r eval_qs.append(eval_q) for d in range(len(eval_done)): if eval_done[d]:", "vector episodes = 0 #scalar t = 0 # scalar", "action, q, _, _ = agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action, q", "current_noise_type = current_noise_type.strip() if current_noise_type == 'none': pass elif 'adaptive-param'", "float(t) / float(duration) combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std']", "not None: nenvs_eval = eval_obs.shape[0] eval_episode_reward = np.zeros(nenvs_eval, dtype =", "f) if eval_env and hasattr(eval_env, 'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb')", "_, stddev = current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions))", "is in [-1, 1]) # note these outputs are batched", "[] epoch_adaptive_distances = [] for t_train in range(nb_train_steps): # Adapt", "np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses)", "x in combined_stats.values()]) if MPI is not None: combined_stats_sums =", "for cycle 
in range(nb_epoch_cycles): # Perform rollouts. if nenvs >", "range(nb_rollout_steps): # Predict next action. action, q, _, _ =", "> 1: # if simulating multiple envs in parallel, impossible", "action. action, q, _, _ = agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action,", "if current_noise_type == 'none': pass elif 'adaptive-param' in current_noise_type: _,", "here instead agent.reset() for t_rollout in range(nb_rollout_steps): # Predict next", "= epoch_episodes combined_stats['rollout/actions_std'] = np.std(epoch_actions) # Evaluation statistics. if eval_env", "q, _, _ = agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action, q =", "logger.record_tabular(key, combined_stats[key]) if rank == 0: logger.dump_tabular() logger.info('') logdir =", "raise ValueError('expected scalar, got %s'%x) combined_stats_sums = np.array([ np.array(x).flatten()[0] for", "combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std'] = np.std(epoch_actions) # Evaluation statistics. if", "'env_state.pkl'), 'wb') as f: pickle.dump(env.get_state(), f) if eval_env and hasattr(eval_env,", "1: agent.reset() # Train. epoch_actor_losses = [] epoch_critic_losses = []", "next action. action, q, _, _ = agent.step(tf.constant(obs), apply_noise=True, compute_Q=True)", "= eval_env.step(max_action * eval_action) # scale for execution in env", "enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent with the following configuration:') logger.info(str(agent.__dict__.items()))", "if necessary. 
if memory.nb_entries >= batch_size and t_train % param_noise_adaption_interval", "eval_env.step(max_action * eval_action) # scale for execution in env (as", "= 0 for epoch in range(nb_epochs): for cycle in range(nb_epoch_cycles):", "time.time() - start_time stats = agent.get_stats() combined_stats = stats.copy() combined_stats['rollout/return']", "+= 1 episodes += 1 if nenvs == 1: agent.reset()", "r, done, info = env.step(max_action * action) # scale for", "# per MPI worker tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed)", "action) # scale for execution in env (as far as", "== 'none': pass elif 'adaptive-param' in current_noise_type: _, stddev =", "max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history =", "in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if rank == 0: logger.dump_tabular() logger.info('')", "combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor']", "np.zeros(nenvs, dtype = np.float32) #vector episode_step = np.zeros(nenvs, dtype =", "network=network, **network_kwargs) actor = Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise =", "elif 'adaptive-param' in current_noise_type: _, stddev = current_noise_type.split('_') param_noise =", "not None: mpi_size = MPI.COMM_WORLD.Get_size() else: mpi_size = 1 #", "= np.std(epoch_actions) # Evaluation statistics. 
if eval_env is not None:", "stddev = current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal' in", "load_path=None, **network_kwargs): set_global_seeds(seed) if total_timesteps is not None: assert nb_epochs", "= int) # vector episodes = 0 #scalar t =", "Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor = Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs)", "episodes combined_stats['rollout/episodes'] = epoch_episodes combined_stats['rollout/actions_std'] = np.std(epoch_actions) # Evaluation statistics.", "concerned, every action is in [-1, 1]) # note these", "episode_step[d] = 0 epoch_episodes += 1 episodes += 1 if", "not None: combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] =", "# Prepare everything. agent.initialize() agent.reset() obs = env.reset() if eval_env", "for t_rollout in range(nb_eval_steps): eval_action, eval_q, _, _ = agent.step(eval_obs,", "= current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal' in current_noise_type:", "x[0] elif np.isscalar(x): return x else: raise ValueError('expected scalar, got", "eval_action) # scale for execution in env (as far as", "noise_type.split(','): current_noise_type = current_noise_type.strip() if current_noise_type == 'none': pass elif", "<filename>baselines/ddpg/ddpg.py import os import os.path as osp import time from", "(k,v) in zip(combined_stats.keys(), combined_stats_sums)} # Total statistics. 
combined_stats['total/epochs'] = epoch", "actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent with the following", "multiplication gets broadcasted to the batch new_obs, r, done, info", "are batched from vecenv t += 1 if rank ==", "popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50, # per epoch cycle and MPI", "= [] epoch_critic_losses = [] epoch_adaptive_distances = [] for t_train", "= int(total_timesteps) // (nb_epoch_cycles * nb_rollout_steps) else: nb_epochs = 500", "combined_stats_sums)} # Total statistics. combined_stats['total/epochs'] = epoch + 1 combined_stats['total/steps']", "episode in each # of the environments, so resetting here", "x else: raise ValueError('expected scalar, got %s'%x) combined_stats_sums = np.array([", "tf import numpy as np try: from mpi4py import MPI", "is not None: mpi_size = MPI.COMM_WORLD.Get_size() else: mpi_size = 1", "actor = Actor(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) action_noise = None param_noise", "action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) else: raise RuntimeError('unknown noise", "agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action, q = action.numpy(), q.numpy() # Execute", "cl, al = agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() # Evaluate. 
eval_episode_rewards", "logdir = logger.get_dir() if rank == 0 and logdir: if", "= tf.train.Checkpoint(model=agent) manager = tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from", "eval_env.reset() nenvs = obs.shape[0] episode_reward = np.zeros(nenvs, dtype = np.float32)", "batch_size and t_train % param_noise_adaption_interval == 0: batch = agent.memory.sample(batch_size=batch_size)", "assert nb_epochs is None nb_epochs = int(total_timesteps) // (nb_epoch_cycles *", "eval_episode_reward = np.zeros(nenvs_eval, dtype = np.float32) for t_rollout in range(nb_eval_steps):", "np.ndarray): assert x.size == 1 return x[0] elif np.isscalar(x): return", "Adapt param noise, if necessary. if memory.nb_entries >= batch_size and", "XXX shouldn't call np.mean on variable length lists duration =", "epoch + 1 combined_stats['total/steps'] = t for key in sorted(combined_stats.keys()):", "param noise, if necessary. if memory.nb_entries >= batch_size and t_train", "env.action_space.high logger.info('scaling actions by {} before executing in env'.format(max_action)) agent", "mpi4py import MPI except ImportError: MPI = None def learn(network,", "outputs are batched from vecenv t += 1 if rank", "epoch_adaptive_distances = [] for t_train in range(nb_train_steps): # Adapt param", "**network_kwargs): set_global_seeds(seed) if total_timesteps is not None: assert nb_epochs is", "RuntimeError('unknown noise type \"{}\"'.format(current_noise_type)) max_action = env.action_space.high logger.info('scaling actions by", "np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] = np.mean(epoch_episode_steps)", "None if noise_type is not None: for current_noise_type in noise_type.split(','):", "episodes += 1 if nenvs == 1: agent.reset() # Train.", "epoch_episodes 
combined_stats['rollout/actions_std'] = np.std(epoch_actions) # Evaluation statistics. if eval_env is", "combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards) def as_scalar(x): if isinstance(x,", "np.zeros(nenvs, dtype = int) # vector episodes = 0 #scalar", "== 0: logger.dump_tabular() logger.info('') logdir = logger.get_dir() if rank ==", "0 for epoch in range(nb_epochs): for cycle in range(nb_epoch_cycles): #", "None: for current_noise_type in noise_type.split(','): current_noise_type = current_noise_type.strip() if current_noise_type", "# Adapt param noise, if necessary. if memory.nb_entries >= batch_size", "int(total_timesteps) // (nb_epoch_cycles * nb_rollout_steps) else: nb_epochs = 500 if", "Predict next action. action, q, _, _ = agent.step(tf.constant(obs), apply_noise=True,", "= [] epoch_adaptive_distances = [] for t_train in range(nb_train_steps): #", "gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr,", "eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards) def as_scalar(x): if isinstance(x, np.ndarray): assert", "combined_stats['total/steps_per_second'] = float(t) / float(duration) combined_stats['total/episodes'] = episodes combined_stats['rollout/episodes'] =", "with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb') as f: pickle.dump(eval_env.get_state(), f) return agent", "os import os.path as osp import time from collections import", "= np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history) combined_stats['rollout/return_history_std'] = np.std(episode_rewards_history) combined_stats['rollout/episode_steps'] =", "action. 
if rank == 0 and render: env.render() # max_action", "from baselines.common import set_global_seeds from baselines import logger import tensorflow", "np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration", "= [] for t_train in range(nb_train_steps): # Adapt param noise,", "env.render() # max_action is of dimension A, whereas action is", "range(nb_epochs): for cycle in range(nb_epoch_cycles): # Perform rollouts. if nenvs", "critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent with the following configuration:')", "None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k : v /", "baselines.common import set_global_seeds from baselines import logger import tensorflow as", "MPI is not None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k", "agent.initialize() agent.reset() obs = env.reset() if eval_env is not None:", "in env (as far as DDPG is concerned, every action", "0 and render: env.render() episode_reward += r episode_step += 1", "manager = tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history", "if load_path is not None: load_path = osp.expanduser(load_path) ckpt =", "+= 1 if nenvs == 1: agent.reset() # Train. 
epoch_actor_losses", "is not None: assert nb_epochs is None nb_epochs = int(total_timesteps)", "param_noise = None if noise_type is not None: for current_noise_type", "None: eval_obs = eval_env.reset() nenvs = obs.shape[0] episode_reward = np.zeros(nenvs,", "dimension (nenvs, A) - the multiplication gets broadcasted to the", "= current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) elif 'ou'", "sigma=float(stddev) * np.ones(nb_actions)) elif 'ou' in current_noise_type: _, stddev =", "current_noise_type: _, stddev = current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) *", "[] epoch_qs = [] epoch_episodes = 0 for epoch in", "epoch_episodes += 1 episodes += 1 if nenvs == 1:", "combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor'] = np.mean(epoch_actor_losses) combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance']", "[] epoch_episode_steps = [] epoch_actions = [] epoch_qs = []", "if hasattr(env, 'get_state'): with open(os.path.join(logdir, 'env_state.pkl'), 'wb') as f: pickle.dump(env.get_state(),", "eval_env.render() eval_episode_reward += eval_r eval_qs.append(eval_q) for d in range(len(eval_done)): if", "for execution in env (as far as DDPG is concerned,", "noise type \"{}\"'.format(current_noise_type)) max_action = env.action_space.high logger.info('scaling actions by {}", "+ 1 combined_stats['total/steps'] = t for key in sorted(combined_stats.keys()): logger.record_tabular(key,", "stats = agent.get_stats() combined_stats = stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std']", "each # of the environments, so resetting here instead agent.reset()", "epoch_episode_rewards.append(episode_reward[d]) episode_rewards_history.append(episode_reward[d]) 
epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0. episode_step[d] = 0 epoch_episodes", "memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic = Critic(nb_actions, ob_shape=env.observation_space.shape, network=network,", "else: raise ValueError('expected scalar, got %s'%x) combined_stats_sums = np.array([ np.array(x).flatten()[0]", "/ mpi_size for (k,v) in zip(combined_stats.keys(), combined_stats_sums)} # Total statistics.", "import Actor, Critic from baselines.ddpg.memory import Memory from baselines.ddpg.noise import", "rank == 0: logger.dump_tabular() logger.info('') logdir = logger.get_dir() if rank", "sigma=float(stddev) * np.ones(nb_actions)) else: raise RuntimeError('unknown noise type \"{}\"'.format(current_noise_type)) max_action", "if MPI is not None: mpi_size = MPI.COMM_WORLD.Get_size() else: mpi_size", "agent.memory.sample(batch_size=batch_size) obs0 = tf.constant(batch['obs0']) distance = agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al", "render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None,", "whereas action is dimension (nenvs, A) - the multiplication gets", "in [-1, 1]) if render_eval: eval_env.render() eval_episode_reward += eval_r eval_qs.append(eval_q)", "as DDPG is concerned, every action is in [-1, 1])", "t for key in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if rank ==", "= Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic = Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs)", "logger.info(str(agent.__dict__.items())) if load_path is not None: load_path = osp.expanduser(load_path) ckpt", "from baselines.ddpg.models import Actor, Critic from 
baselines.ddpg.memory import Memory from", "= np.mean(epoch_episode_steps) combined_stats['rollout/actions_mean'] = np.mean(epoch_actions) combined_stats['rollout/Q_mean'] = np.mean(epoch_qs) combined_stats['train/loss_actor'] =", "'none': pass elif 'adaptive-param' in current_noise_type: _, stddev = current_noise_type.split('_')", "mpi_size = MPI.COMM_WORLD.Get_size() else: mpi_size = 1 # Log stats.", "combined_stats['rollout/actions_std'] = np.std(epoch_actions) # Evaluation statistics. if eval_env is not", "tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None, **network_kwargs): set_global_seeds(seed) if total_timesteps is not", "1: # if simulating multiple envs in parallel, impossible to", "env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg,", "= 0 epoch_episodes += 1 episodes += 1 if nenvs", "= np.float32) #vector episode_step = np.zeros(nenvs, dtype = int) #", "for key in sorted(combined_stats.keys()): logger.record_tabular(key, combined_stats[key]) if rank == 0:", "* nb_rollout_steps) else: nb_epochs = 500 if MPI is not", "= obs.shape[0] episode_reward = np.zeros(nenvs, dtype = np.float32) #vector episode_step", "DDPG is concerned, every action is in [-1, 1]) if", "from baselines.ddpg.memory import Memory from baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise, OrnsteinUhlenbeckActionNoise", "observation_shape=env.observation_space.shape) critic = Critic(nb_actions, ob_shape=env.observation_space.shape, network=network, **network_kwargs) actor = Actor(nb_actions,", "= action.numpy(), q.numpy() # Execute next action. 
if rank ==", "time.time() epoch_episode_rewards = [] epoch_episode_steps = [] epoch_actions = []", "in current_noise_type: _, stddev = current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev)", "MPI except ImportError: MPI = None def learn(network, env, seed=None,", "simulating multiple envs in parallel, impossible to reset agent at", "= DDPG(actor, critic, memory, env.observation_space.shape, env.action_space.shape, gamma=gamma, tau=tau, normalize_returns=normalize_returns, normalize_observations=normalize_observations,", "scale for execution in env (as far as DDPG is", "length lists duration = time.time() - start_time stats = agent.get_stats()", "isinstance(x, np.ndarray): assert x.size == 1 return x[0] elif np.isscalar(x):", "unrolled in memory.py's append. obs = new_obs for d in", "= [] epoch_qs = [] epoch_episodes = 0 for epoch", "ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history = deque(maxlen=100) episode_rewards_history = deque(maxlen=100)", "the batch new_obs, r, done, info = env.step(max_action * action)", "action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm, reward_scale=reward_scale) logger.info('Using agent", "epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() # Evaluate. 
eval_episode_rewards = [] eval_qs =", "learn(network, env, seed=None, total_timesteps=None, nb_epochs=None, # with default settings, perform", "tensorflow as tf import numpy as np try: from mpi4py", "actions by {} before executing in env'.format(max_action)) agent = DDPG(actor,", "logger.info('scaling actions by {} before executing in env'.format(max_action)) agent =", "noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2, actor_lr=1e-4, critic_lr=1e-3, popart=False, gamma=0.99, clip_norm=None, nb_train_steps=50,", "= OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) else: raise RuntimeError('unknown noise type", "Prepare everything. agent.initialize() agent.reset() obs = env.reset() if eval_env is", "batched data will be unrolled in memory.py's append. obs =", "def as_scalar(x): if isinstance(x, np.ndarray): assert x.size == 1 return", "stats.copy() combined_stats['rollout/return'] = np.mean(epoch_episode_rewards) combined_stats['rollout/return_std'] = np.std(epoch_episode_rewards) combined_stats['rollout/return_history'] = np.mean(episode_rewards_history)", "memory.py's append. obs = new_obs for d in range(len(done)): if", "= eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards) def as_scalar(x): if isinstance(x, np.ndarray):", "_, _ = agent.step(tf.constant(obs), apply_noise=True, compute_Q=True) action, q = action.numpy(),", "render: env.render() # max_action is of dimension A, whereas action", "epoch_episode_steps.append(episode_step[d]) episode_reward[d] = 0. 
episode_step[d] = 0 epoch_episodes += 1", "np.mean on variable length lists duration = time.time() - start_time", "_, stddev = current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal'", "clip_norm=None, nb_train_steps=50, # per epoch cycle and MPI worker, nb_eval_steps=100,", "Critic from baselines.ddpg.memory import Memory from baselines.ddpg.noise import AdaptiveParamNoiseSpec, NormalActionNoise,", "MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats = {k : v / mpi_size for (k,v)", "statistics. combined_stats['total/epochs'] = epoch + 1 combined_stats['total/steps'] = t for", "epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action, r, new_obs, done) #the batched data", "= tf.train.CheckpointManager(ckpt, load_path, max_to_keep=None) ckpt.restore(manager.latest_checkpoint) print(\"Restoring from {}\".format(manager.latest_checkpoint)) eval_episode_rewards_history =", "1 # Log stats. 
# XXX shouldn't call np.mean on", "envs in parallel, impossible to reset agent at the end", "logger import tensorflow as tf import numpy as np try:", "current_noise_type: _, stddev = current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif", "current_noise_type.split('_') param_noise = AdaptiveParamNoiseSpec(initial_stddev=float(stddev), desired_action_stddev=float(stddev)) elif 'normal' in current_noise_type: _,", "hasattr(eval_env, 'get_state'): with open(os.path.join(logdir, 'eval_env_state.pkl'), 'wb') as f: pickle.dump(eval_env.get_state(), f)", "current_noise_type: _, stddev = current_noise_type.split('_') action_noise = NormalActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) *", "is None nb_epochs = int(total_timesteps) // (nb_epoch_cycles * nb_rollout_steps) else:", "try: from mpi4py import MPI except ImportError: MPI = None", "dtype = int) # vector episodes = 0 #scalar t", "eval_qs = [] if eval_env is not None: nenvs_eval =", "nb_eval_steps=100, batch_size=64, # per MPI worker tau=0.01, eval_env=None, param_noise_adaption_interval=50, load_path=None,", "if eval_env is not None: combined_stats['eval/return'] = eval_episode_rewards combined_stats['eval/return_history'] =", "deque import pickle from baselines.ddpg.ddpg_learner import DDPG from baselines.ddpg.models import", "= None param_noise = None if noise_type is not None:", "if MPI is not None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums) combined_stats =", "in range(len(eval_done)): if eval_done[d]: eval_episode_rewards.append(eval_episode_reward[d]) eval_episode_rewards_history.append(eval_episode_reward[d]) eval_episode_reward[d] = 0.0 if", "current_noise_type.split('_') action_noise = OrnsteinUhlenbeckActionNoise(mu=np.zeros(nb_actions), sigma=float(stddev) * np.ones(nb_actions)) else: raise RuntimeError('unknown", "# Book-keeping. 
epoch_actions.append(action) epoch_qs.append(q) agent.store_transition(obs, action, r, new_obs, done) #the", "* np.ones(nb_actions)) elif 'ou' in current_noise_type: _, stddev = current_noise_type.split('_')", "= None def learn(network, env, seed=None, total_timesteps=None, nb_epochs=None, # with", "elif 'normal' in current_noise_type: _, stddev = current_noise_type.split('_') action_noise =", "actions. memory = Memory(limit=int(1e6), action_shape=env.action_space.shape, observation_shape=env.observation_space.shape) critic = Critic(nb_actions, ob_shape=env.observation_space.shape,", "mpi_size = 1 # Log stats. # XXX shouldn't call", "None param_noise = None if noise_type is not None: for", "* action) # scale for execution in env (as far", "as osp import time from collections import deque import pickle", "elif 'ou' in current_noise_type: _, stddev = current_noise_type.split('_') action_noise =", "eval_episode_rewards combined_stats['eval/return_history'] = np.mean(eval_episode_rewards_history) combined_stats['eval/Q'] = eval_qs combined_stats['eval/episodes'] = len(eval_episode_rewards)", "episode_rewards_history = deque(maxlen=100) # Prepare everything. agent.initialize() agent.reset() obs =", "# Total statistics. combined_stats['total/epochs'] = epoch + 1 combined_stats['total/steps'] =", "combined_stats['train/loss_critic'] = np.mean(epoch_critic_losses) combined_stats['train/param_noise_distance'] = np.mean(epoch_adaptive_distances) combined_stats['total/duration'] = duration combined_stats['total/steps_per_second']", "# Evaluation statistics. if eval_env is not None: combined_stats['eval/return'] =", "nb_epochs is None nb_epochs = int(total_timesteps) // (nb_epoch_cycles * nb_rollout_steps)", "as f: pickle.dump(env.get_state(), f) if eval_env and hasattr(eval_env, 'get_state'): with", "in parallel, impossible to reset agent at the end of", "t_rollout in range(nb_rollout_steps): # Predict next action. 
action, q, _,", "to reset agent at the end of the episode in", "= 0 # scalar epoch = 0 start_time = time.time()", "agent.adapt_param_noise(obs0) epoch_adaptive_distances.append(distance) cl, al = agent.train() epoch_critic_losses.append(cl) epoch_actor_losses.append(al) agent.update_target_net() #", "noise, if necessary. if memory.nb_entries >= batch_size and t_train %", "action_noise = None param_noise = None if noise_type is not", "0.0 if MPI is not None: mpi_size = MPI.COMM_WORLD.Get_size() else:", "1 if rank == 0 and render: env.render() episode_reward +=", "np try: from mpi4py import MPI except ImportError: MPI =", "shouldn't call np.mean on variable length lists duration = time.time()", "= current_noise_type.strip() if current_noise_type == 'none': pass elif 'adaptive-param' in", "import numpy as np try: from mpi4py import MPI except", "%s'%x) combined_stats_sums = np.array([ np.array(x).flatten()[0] for x in combined_stats.values()]) if", "is concerned, every action is in [-1, 1]) # note", "0. episode_step[d] = 0 epoch_episodes += 1 episodes += 1", "== 0 and render: env.render() # max_action is of dimension", "for epoch in range(nb_epochs): for cycle in range(nb_epoch_cycles): # Perform", "as tf import numpy as np try: from mpi4py import", "(as far as DDPG is concerned, every action is in", "dtype = np.float32) for t_rollout in range(nb_eval_steps): eval_action, eval_q, _,", "normalize_returns=normalize_returns, normalize_observations=normalize_observations, batch_size=batch_size, action_noise=action_noise, param_noise=param_noise, critic_l2_reg=critic_l2_reg, actor_lr=actor_lr, critic_lr=critic_lr, enable_popart=popart, clip_norm=clip_norm,", "the episode in each # of the environments, so resetting", "Log stats. 
# XXX shouldn't call np.mean on variable length", "MPI is not None: rank = MPI.COMM_WORLD.Get_rank() else: rank =", "max_action is of dimension A, whereas action is dimension (nenvs,", "= duration combined_stats['total/steps_per_second'] = float(t) / float(duration) combined_stats['total/episodes'] = episodes", "in combined_stats.values()]) if MPI is not None: combined_stats_sums = MPI.COMM_WORLD.allreduce(combined_stats_sums)", "total nb_epoch_cycles=20, nb_rollout_steps=100, reward_scale=1.0, render=False, render_eval=False, noise_type='adaptive-param_0.2', normalize_returns=False, normalize_observations=True, critic_l2_reg=1e-2,", "v / mpi_size for (k,v) in zip(combined_stats.keys(), combined_stats_sums)} # Total", "gamma=0.99, clip_norm=None, nb_train_steps=50, # per epoch cycle and MPI worker,", "compute_Q=True) eval_obs, eval_r, eval_done, eval_info = eval_env.step(max_action * eval_action) #", "nb_epochs=None, # with default settings, perform 1M steps total nb_epoch_cycles=20,", "= 500 if MPI is not None: rank = MPI.COMM_WORLD.Get_rank()", "environments, so resetting here instead agent.reset() for t_rollout in range(nb_rollout_steps):", "0: logger.dump_tabular() logger.info('') logdir = logger.get_dir() if rank == 0", "None: assert nb_epochs is None nb_epochs = int(total_timesteps) // (nb_epoch_cycles", "will be unrolled in memory.py's append. obs = new_obs for", "epoch = 0 start_time = time.time() epoch_episode_rewards = [] epoch_episode_steps", "[] eval_qs = [] if eval_env is not None: nenvs_eval" ]
[ "from PySide2.QtWidgets import * from PySide2.QtPrintSupport import QPrinter, QPrintDialog from", "def print_statement(self): printer = QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'),", "from PySide2.QtCore import * from PySide2.QtWidgets import * from PySide2.QtPrintSupport", "TemplateNotFound: pass else: html = trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def", "html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc = QTextDocument(self) doc.setHtml(html)", "PrinterError class TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self, transaction, parent=None, *args, **kwargs):", "(self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted: return None try:", "__init__(self, transaction, parent=None, *args, **kwargs): QDialog.__init__(self, parent, *args, **kwargs) self._transaction", "self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass else:", "try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as e: raise PrinterError('Printer", "self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound:", "!= QDialog.Accepted: return None try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound", ".ui.ui_transaction_details import Ui_TransactionDetails from .ui import images_rc from . 
import", "self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer = QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf'", "None try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as e: raise", "from .ui.ui_transaction_details import Ui_TransactionDetails from .ui import images_rc from .", "printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc = QTextDocument(self) doc.setHtml(html) doc.print_(printer) return None", "= trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer = QPrinter()", "import * from PySide2.QtCore import * from PySide2.QtWidgets import *", ".exceptions import PrinterError class TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self, transaction, parent=None,", "self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer = QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' %", "as e: raise PrinterError('Printer data source unavailable') from e html", "e: raise PrinterError('Printer data source unavailable') from e html =", "QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_()", "import * from PySide2.QtPrintSupport import QPrinter, QPrintDialog from jinja2 import", "QDialog.__init__(self, parent, *args, **kwargs) self._transaction = transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic',", ". 
import jinja_env from .exceptions import PrinterError class TransactionDetails(QDialog, Ui_TransactionDetails):", "QIcon(':/icons/print'))) try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass else: html", "**kwargs) self._transaction = transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans", "os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted:", "self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted: return None try: trans", "if QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted: return None try: trans =", "= jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as e: raise PrinterError('Printer data source", "html = trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer =", "= jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass else: html = trans.render(transaction=self._transaction, standalone=True)", "datetime from PySide2.QtGui import * from PySide2.QtCore import * from", "parent, *args, **kwargs) self._transaction = transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print')))", "raise PrinterError('Printer data source unavailable') from e html = trans.render(transaction=self._transaction,", "*args, **kwargs): QDialog.__init__(self, parent, *args, **kwargs) self._transaction = transaction self.setupUi(self)", "from PySide2.QtGui import * from PySide2.QtCore import * from PySide2.QtWidgets", "source 
unavailable') from e html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p'))", "printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_() !=", "Ui_TransactionDetails): def __init__(self, transaction, parent=None, *args, **kwargs): QDialog.__init__(self, parent, *args,", "PrinterError('Printer data source unavailable') from e html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y,", "trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass else: html = trans.render(transaction=self._transaction,", "jinja2 import TemplateNotFound from .ui.ui_transaction_details import Ui_TransactionDetails from .ui import", "import Ui_TransactionDetails from .ui import images_rc from . import jinja_env", "from e html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc =", "os from datetime import datetime from PySide2.QtGui import * from", "PySide2.QtCore import * from PySide2.QtWidgets import * from PySide2.QtPrintSupport import", "QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted: return None try: trans = jinja_env.get_template('trans.jinja2.html')", "from .ui import images_rc from . 
import jinja_env from .exceptions", "unavailable') from e html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc", "def __init__(self, transaction, parent=None, *args, **kwargs): QDialog.__init__(self, parent, *args, **kwargs)", "**kwargs): QDialog.__init__(self, parent, *args, **kwargs) self._transaction = transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName())", "* from PySide2.QtCore import * from PySide2.QtWidgets import * from", "*args, **kwargs) self._transaction = transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try:", "from datetime import datetime from PySide2.QtGui import * from PySide2.QtCore", "= transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans = jinja_env.get_template('trans.jinja2.html')", "* from PySide2.QtWidgets import * from PySide2.QtPrintSupport import QPrinter, QPrintDialog", "import datetime from PySide2.QtGui import * from PySide2.QtCore import *", "PySide2.QtGui import * from PySide2.QtCore import * from PySide2.QtWidgets import", "QPrinter, QPrintDialog from jinja2 import TemplateNotFound from .ui.ui_transaction_details import Ui_TransactionDetails", "jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as e: raise PrinterError('Printer data source unavailable')", ".ui import images_rc from . 
import jinja_env from .exceptions import", "from PySide2.QtPrintSupport import QPrinter, QPrintDialog from jinja2 import TemplateNotFound from", "standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer = QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'),", "import * from PySide2.QtWidgets import * from PySide2.QtPrintSupport import QPrinter,", "* from PySide2.QtPrintSupport import QPrinter, QPrintDialog from jinja2 import TemplateNotFound", "transaction, parent=None, *args, **kwargs): QDialog.__init__(self, parent, *args, **kwargs) self._transaction =", "self._transaction = transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans =", "data source unavailable') from e html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S", "QDialog.Accepted: return None try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as", "<filename>footprints/transaction_details.py import os from datetime import datetime from PySide2.QtGui import", "parent=None, *args, **kwargs): QDialog.__init__(self, parent, *args, **kwargs) self._transaction = transaction", "import PrinterError class TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self, transaction, parent=None, *args,", "self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass", "self.parentWidget()).exec_() != QDialog.Accepted: return None try: trans = jinja_env.get_template('trans.jinja2.html') except", "TemplateNotFound from .ui.ui_transaction_details import Ui_TransactionDetails from .ui import images_rc from", "print_statement(self): printer = 
QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code)))", "TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self, transaction, parent=None, *args, **kwargs): QDialog.__init__(self, parent,", "import images_rc from . import jinja_env from .exceptions import PrinterError", "return None try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as e:", "except TemplateNotFound: pass else: html = trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement)", "transaction self.setupUi(self) self.setWindowTitle(QApplication.applicationName()) self.print_pb.setIcon(QIcon.fromTheme('document-print-symbolic', QIcon(':/icons/print'))) try: trans = jinja_env.get_template('trans.jinja2.html') except", "jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass else: html = trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html)", "datetime import datetime from PySide2.QtGui import * from PySide2.QtCore import", "= QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer,", "try: trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound: pass else: html =", "import os from datetime import datetime from PySide2.QtGui import *", "trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc = QTextDocument(self) doc.setHtml(html) doc.print_(printer) return", "e html = trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc = QTextDocument(self)", "import jinja_env from .exceptions import PrinterError class TransactionDetails(QDialog, 
Ui_TransactionDetails): def", "Ui_TransactionDetails from .ui import images_rc from . import jinja_env from", "import QPrinter, QPrintDialog from jinja2 import TemplateNotFound from .ui.ui_transaction_details import", "printer = QPrinter() printer.setOutputFileName(os.path.join( os.environ.get('HOME'), '%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if", "% (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted: return None", "trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer = QPrinter() printer.setOutputFileName(os.path.join(", "class TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self, transaction, parent=None, *args, **kwargs): QDialog.__init__(self,", "trans = jinja_env.get_template('trans.jinja2.html') except TemplateNotFound as e: raise PrinterError('Printer data", "TemplateNotFound as e: raise PrinterError('Printer data source unavailable') from e", "except TemplateNotFound as e: raise PrinterError('Printer data source unavailable') from", "QPrintDialog from jinja2 import TemplateNotFound from .ui.ui_transaction_details import Ui_TransactionDetails from", "pass else: html = trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self):", "import TemplateNotFound from .ui.ui_transaction_details import Ui_TransactionDetails from .ui import images_rc", "PySide2.QtWidgets import * from PySide2.QtPrintSupport import QPrinter, QPrintDialog from jinja2", "from jinja2 import TemplateNotFound from .ui.ui_transaction_details import Ui_TransactionDetails from .ui", "from . 
import jinja_env from .exceptions import PrinterError class TransactionDetails(QDialog,", "jinja_env from .exceptions import PrinterError class TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self,", "from .exceptions import PrinterError class TransactionDetails(QDialog, Ui_TransactionDetails): def __init__(self, transaction,", "else: html = trans.render(transaction=self._transaction, standalone=True) self.statement_tb.setHtml(html) self.print_pb.clicked.connect(self.print_statement) def print_statement(self): printer", "images_rc from . import jinja_env from .exceptions import PrinterError class", "'%s_%s.pdf' % (self._transaction.created_at.strftime('%Y%m%d'), self._transaction.transaction_code))) if QPrintDialog(printer, self.parentWidget()).exec_() != QDialog.Accepted: return", "= trans.render(transaction=self._transaction, printed_at=datetime.now().strftime('%d/%m/%Y, %I:%M:%S %p')) doc = QTextDocument(self) doc.setHtml(html) doc.print_(printer)", "PySide2.QtPrintSupport import QPrinter, QPrintDialog from jinja2 import TemplateNotFound from .ui.ui_transaction_details" ]
[ "equal to other. \"\"\" # Check that the other is", "optional Which columns to read, with 0 being the first.", "isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret def sanitize_units_add(this_object, other_object,", "expm1, log1p, sqrt, square, \\ reciprocal, sin, cos, tan, arcsin,", "try: # numpy 1.13 or newer from numpy import positive,", "Examples -------- >>> a = yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\") \"\"\"", "optional The Pint UnitRegistry to use in the conversion. If", "arr2.units arr = YTArray(v, units, registry=registry) return arr def uintersect1d(arr1,", "not_equal: comparison_unit, equal: comparison_unit, logical_and: comparison_unit, logical_or: comparison_unit, logical_xor: comparison_unit,", "must have the same units. See the documentation of numpy.concatenate", "out=None, ddof=0): return super(YTArray, self).std(axis, dtype, out, ddof), self.units def", "other.units) return other.in_units(this.units) return other @lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units, other_units):", "using python syntax (cm**3, not cm^3). registry : ~yt.units.unit_registry.UnitRegistry The", "unit2=None): return unit def return_without_unit(unit, unit2=None): return None def arctan2_unit(unit1,", "# Attach the units obj.units = units return obj def", "of the result. For example, if ``axis=0`` it will be", "__radd__(self, left_object): \"\"\" See __add__. \"\"\" lo = sanitize_units_add(self, left_object,", "to_ndarray(self): \"\"\" Creates a copy of this array with the", "# size > 1 out_arr = YTArray(np.asarray(out_arr), unit) else: out_arr", "passthrough_unit, fabs: passthrough_unit, rint: return_without_unit, sign: return_without_unit, conj: passthrough_unit, exp:", "that you wish to convert to. 
equiv : string The", "\"\"\" Convert a YTArray or YTQuantity to a Pint Quantity.", "avoid a circular import from yt.funcs def iterable(obj): try: len(obj)", "return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\" Creates a copy of this", "sympy import Rational from yt.units.unit_lookup_table import \\ default_unit_symbol_lut from yt.units.equivalencies", "self) return self def convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert the array", "__mul__. \"\"\" oth = sanitize_units_mul(self, other) np.multiply(self, oth, out=self) return", "ufunc in trigonometric_operators: inp = inp.in_units('radian').v if out_arr is not", "p_units, registry=unit_registry) def to_pint(self, unit_registry=None): \"\"\" Convert a YTArray or", "d.shape == self.shape and d.dtype == self.dtype: d[...] = self", "converted to cgs units. \"\"\" return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\"", "\"\"\" # converts if possible oth = validate_comparison_units(self, other, 'less_than')", "None. return False oth = validate_comparison_units(self, other, 'equal') return super(YTArray,", "``usecols = (1,4,5)`` will extract the 2nd, 5th and 6th", "units) # # End unit conversion methods # def write_hdf5(self,", "copy(self, order='C'): return type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self, obj): if obj", "handle being multiplied. 
\"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray,", "units[0].dimensions == units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit =", "be used instead of the registry associated with the unit", "registry is None and isinstance(input_units, (str, bytes)): if input_units.startswith('code_'): raise", "fmin, \\ isreal, iscomplex, isfinite, isinf, isnan, signbit, copysign, nextafter,", "norm = np.linalg.norm(data, ord=ord, axis=axis) else: norm = np.linalg.norm(data, ord=ord,", "else: g = f.create_group(group_name) else: g = f if dataset_name", "dimensionless YTArray. The pow value. \"\"\" if isinstance(power, YTArray): if", "the conversion. If not specified, the default base units of", "ret_class(inps[0]).units)) return inps, units def handle_comparison_units(inps, units, ufunc, ret_class, raise_error=False):", "def to_equivalent(self, unit, equiv, **kwargs): \"\"\" Convert a YTArray or", "def uhstack(arrs): \"\"\"Stack arrays in sequence horizontally (column wise) while", "a YTArray or YTQuantity to an equivalent, e.g., something that", "other): \"\"\" See __mul__. \"\"\" oth = sanitize_units_mul(self, other) np.multiply(self,", "func(*args, **kwargs) if ret.shape == (): return YTQuantity(ret, units) else:", "unit = Unit(registry=unit.registry) return out, out_arr, unit def coerce_iterable_units(input_object): if", "is None: u = NULL_UNIT if u.dimensions is angle and", "in that system's base units. Parameters ---------- unit_system : string,", "data in the iterable. return YTArray(input_object) return input_object else: return", "cm \"\"\" v = np.union1d(arr1, arr2) v = validate_numpy_wrapper_units(v, [arr1,", "unit2 = Unit(registry=getattr(unit1, 'registry', None)) elif ufunc is power: unit2", "view of the array data.\"\"\" return self.ndarray_view() d = ndview", "restore the state of the ndarray. This is always defined", "that other is a YTArray. 
if hasattr(other, 'units'): if this.units.expr", "\"\"\" An alias for YTArray.in_units(). See the docstrings of that", "def __ror__(self, left_object): return super(YTArray, self).__ror__(left_object) def __ior__(self, other): np.bitwise_or(self,", "= ( conv_unit.has_equivalent(equiv) or this_equiv._one_way) if self.has_equivalent(equiv) and oneway_or_equivalent: new_arr", "ufunc with %i inputs has not been\" \"added to YTArray.\"", "Subtract the object on the right of the `-` from", "True else: # Here we catch the first line of", "For example, if ``axis=0`` it will be the first dimension", "return_without_unit, isnan: return_without_unit, signbit: return_without_unit, copysign: passthrough_unit, nextafter: preserve_units, modf:", "and returns it in that system's base units. Parameters ----------", "getattr(obj, 'units', NULL_UNIT) def __pos__(self): \"\"\" Posify the data. \"\"\"", "will be used. Examples -------- >>> from pint import UnitRegistry", "two input arrays. A wrapper around numpy.intersect1d that preserves units.", "d = g.create_dataset(dataset_name, data=self) else: d = g.create_dataset(dataset_name, data=self) for", "coerce_iterable_units(other_object) # Make sure the other object is a YTArray", ">>> b = sp[\"temperature\"] >>> c = sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\",", "AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def from_pint(cls, arr, unit_registry=None):", "= unit return out_arr def copy(self, order='C'): return type(self)(np.copy(np.asarray(self)), self.units)", "hasattr(self, 'units'): ret.units = self.units return ret # # Start", "preserve_units, deg2rad: return_without_unit, rad2deg: return_without_unit, bitwise_and: bitop_units, bitwise_or: bitop_units, bitwise_xor:", "wrapper around numpy.cross preserves units. 
See the documentation of numpy.cross", "elif ufunc in binary_operators: unit_operator = self._ufunc_registry[context[0]] inps, units, ret_class", "inps, units, ufunc, ret_class) unit = unit_operator(*units) out_arr = func(np.asarray(inps[0]),", "3], 'cm') >>> b = YTArray([4, 5, 6], 'm') >>>", "third element of the returned tuple, itself a tuple used", "# units kwarg set, but it's not a Unit object.", "np.floor_divide(self, oth, out=self) return self def __or__(self, right_object): return super(YTArray,", "3, 4], 'cm') >>> uintersect1d(A, B) YTArray([ 2., 3.]) cm", "not None: out_orig[0].flat[:] = out.flat[:] if isinstance(out_orig[0], YTArray): out_orig[0].units =", "serialized by pickle. \"\"\" super(YTArray, self).__setstate__(state[1:]) try: unit, lut =", "line[0] == comments: if next_one: units = words[1:] if len(words)", "0, 1, 2, 3]) g/cm**3 and strip them when it", "self def __and__(self, right_object): return super(YTArray, self).__and__(right_object) def __rand__(self, left_object):", "= f dataset = g[dataset_name] data = dataset[:] units =", "copysign, nextafter, \\ modf, ldexp, frexp, fmod, floor, ceil, trunc,", "yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.quan(5, 'code_length')", "data in the equivalent cgs units, and returns it. Returns", "not isinstance(other_units, Unit): other_units = Unit(other_units, registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions,", "self).__rsub__(lo) def __isub__(self, other): \"\"\" See __sub__. \"\"\" oth =", "fmod: preserve_units, absolute: passthrough_unit, fabs: passthrough_unit, rint: return_without_unit, sign: return_without_unit,", "a YTArray. oth = validate_comparison_units(self, other, 'greater than') return super(YTArray,", "if isinstance(unit2, YTArray): if unit2.units.is_dimensionless: pass else: raise YTUnitOperationError(ufunc, unit1,", "a wrapper around np.linalg.norm that preserves units. 
See the documentation", "input_array.view(cls) if input_units is None: if registry is None: ret.units", "mix units with the same # dimensions. if isinstance(ret, YTArray):", "defined for numpy arrays. \"\"\" np_ret = super(YTArray, self).__reduce__() obj_state", "from yt import YTArray >>> a = YTArray([1, 2, 3],", "return type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self, obj): if obj is None", "this YTArray to some power. Parameters ---------- power : float", "else: units = Unit(str(input_array.units), registry=registry) ret.units = units elif isinstance(input_units,", "units : Unit object or string, optional The units you", "603.]) cm >>> b + a YTArray([ 4.01, 5.02, 6.03])", "right_object, \"subtraction\") return super(YTArray, self).__sub__(ro) def __rsub__(self, left_object): \"\"\" See", "to hdf5 file. Parameters ---------- filename: string The filename to", "identical units.\") v.units = a1.units return v def uconcatenate(arrs, axis=0):", "if self.has_equivalent(equiv) and oneway_or_equivalent: new_arr = this_equiv.convert( self, conv_unit.dimensions, **kwargs)", "isnat, heaviside = (None,)*4 from yt.units.unit_object import Unit, UnitParseError from", "This is equivalent to: >>> b = YTQuantity(5, 'code_length', registry=ds.unit_registry)", "# Converting from AstroPy Quantity u = arr.unit ap_units =", "arrays from. If not specified, the arrays are datasets at", "default: '#'. Examples -------- >>> temp, velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2),", "this ytarray to the object on the right of the", "of numpy.intersect1d for full details. Examples -------- >>> A =", "self.to_equivalent(units, equivalence, **kwargs) def to(self, units, equivalence=None, **kwargs): \"\"\" An", "default, this is any whitespace. usecols : sequence, optional Which", "UnitRegistry, optional A yt unit registry to use in the", "YTQuantity is tightly integrated with yt datasets: >>> import yt", "quantity in. 
equivalence : string, optional The equivalence you wish", "units @return_arr def mean(self, axis=None, dtype=None, out=None): return super(YTArray, self).mean(axis,", "= yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.arr(np.ones(5), 'code_length') >>> a.in_cgs() YTArray([", "finfo.sampling_type == 'particle': units = finfo.output_units else: units = finfo.units", "for k in d.attrs.keys(): del d.attrs[k] else: del f[dataset_name] d", "-------- >>> from yt import YTQuantity >>> a = YTQuantity(1,", "is greater than the object on the right. \"\"\" #", "units with the same # dimensions. if isinstance(ret, YTArray): if", "this.units, other.units) return other.in_units(this.units) return other @lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units,", "return out_arr def copy(self, order='C'): return type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self,", "preserve_units, absolute: passthrough_unit, fabs: passthrough_unit, rint: return_without_unit, sign: return_without_unit, conj:", "== 2: unit_operator = self._ufunc_registry[ufunc] inps, units, ret_class = get_inp_u_binary(ufunc,", "See the documentation of numpy.cross for full details. \"\"\" v", "on the right. \"\"\" # converts if possible oth =", "def std(self, axis=None, dtype=None, out=None, ddof=0): return super(YTArray, self).std(axis, dtype,", "\"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rdiv__(lo) def __idiv__(self,", "COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- import copy import numpy", "to use. To see which equivalencies are supported for this", "this unitful quantity, try the :meth:`list_equivalencies` method. Default: None Returns", "arrs) return v def ustack(arrs, axis=0): \"\"\"Join a sequence of", "hdf5 file. 
Parameters ---------- filename: string The filename to create", "np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter, converters=None, unpack=True, usecols=usecols, ndmin=0) if usecols", "ufunc, ret_class): if units[0] != units[1]: any_nonzero = [np.any(inps[0]), np.any(inps[1])]", "= func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs) if unit_operator in (multiply_units, divide_units):", "self.view(np.ndarray) def to_ndarray(self): \"\"\" Creates a copy of this array", "@return_arr def prod(self, axis=None, dtype=None, out=None): if axis is not", "type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\" A scalar associated with a", "if isinstance(input_object, np.ndarray): return input_object if iterable(input_object): if any([isinstance(o, YTArray)", "already associated with a unit registry and this is specified,", "None) if u is None: u = NULL_UNIT if u.dimensions", "isinstance(arr.value, np.ndarray): return YTArray(arr.value, ap_units, registry=unit_registry) else: return YTQuantity(arr.value, ap_units,", "This is a wrapper around np.vstack that preserves units. \"\"\"", "not my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units, my_units.dimensions, other_units, other_units.dimensions) return other_units", "self).__div__(ro) def __rdiv__(self, left_object): \"\"\" See __div__. 
\"\"\" lo =", "registry=registry) return arr def uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find the sorted", "\"\"\" super(YTArray, self).__setstate__(state[1:]) try: unit, lut = state[0] except TypeError:", "if LooseVersion(np.__version__) < LooseVersion('1.13.0'): def __add__(self, right_object): \"\"\" Add this", "logaddexp2, true_divide, power, remainder, mod, arctan2, hypot, bitwise_and, bitwise_or, bitwise_xor,", "log, log2, log10, expm1, log1p, sqrt, square, \\ reciprocal, sin,", "If input_units is already associated with a unit registry and", "oth = sanitize_units_mul(self, other) np.true_divide(self, oth, out=self) return self def", "YTArray. oth = validate_comparison_units( self, other, 'greater than or equal')", "kwarg set, but it's not a Unit object. # don't", "@property def value(self): \"\"\"Get a copy of the array data", "if u is None: u = NULL_UNIT if u.dimensions is", "group_name=None): r\"\"\"Writes a YTArray to hdf5 file. Parameters ---------- filename:", "def __new__(cls, input_array, input_units=None, registry=None, dtype=None, bypass_validation=False): if dtype is", "axis=0): \"\"\"Concatenate a sequence of arrays. This wrapper around numpy.concatenate", "method) if 'out' in kwargs: out_orig = kwargs.pop('out') out =", "if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units) return other.in_units(this.units) return", "super(YTArray, self).__div__(ro) def __rdiv__(self, left_object): \"\"\" See __div__. \"\"\" lo", "must have the same number of values. 
Parameters ---------- fname", "not None: out_arr = ufunc(inp).view(np.ndarray) return out_arr, inp, u def", "@lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit): return unit**-1 def passthrough_unit(unit, unit2=None): return", "other): \"\"\" Test if this is equal to the object", "units[1]) elif any_nonzero[1] == np.bool_(False): units = (units[0], units[0]) else:", "return self.view(np.ndarray) def to_ndarray(self): \"\"\" Creates a copy of this", "self).__abs__() # # Start comparison operators. # def __lt__(self, other):", "input_units else: ret.units = Unit(input_units, registry=registry) return ret elif isinstance(input_array,", "be used. \"\"\" # Converting from AstroPy Quantity u =", "for full details. \"\"\" v = np.cross(arr1, arr2, axisa=axisa, axisb=axisb,", "if u.dimensions is angle and ufunc in trigonometric_operators: inp =", "The equivalence you wish to use. To see which equivalencies", "is None: dataset_name = 'array_data' f = h5py.File(filename) if group_name", "YTQuantity): return cls1 if issubclass(cls1, cls2): return cls1 if issubclass(cls2,", "sin: return_without_unit, cos: return_without_unit, tan: return_without_unit, sinh: return_without_unit, cosh: return_without_unit,", "context[1] if ufunc in unary_operators: out_arr, inp, u = get_inp_u_unary(ufunc,", "self).__pow__ returns a YTArray # with a unit attribute set", "See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.divide(self, oth, out=self)", "raise YTIterableUnitCoercionError(input_object) # This will create a copy of the", "from __future__ import print_function #----------------------------------------------------------------------------- # Copyright (c) 2013, yt", "units The units of the array. Powers must be specified", "out=None): if axis is not None: units = self.units**self.shape[axis] else:", "return super(YTArray, self).__rsub__(lo) def __isub__(self, other): \"\"\" See __sub__. 
\"\"\"", "in usecols] mylog.info(\"Array units: %s\" % \", \".join(units)) return tuple([YTArray(arr,", "YTQuantity The unitful quantity to convert from. unit_registry : Pint", "the data in the equivalent cgs units, and returns it.", "for YTArray instances\") def bitop_units(unit1, unit2): raise TypeError( \"Bit-twiddling operators", "sanitize_units_add(self, other, \"addition\") np.add(self, oth, out=self) return self def __sub__(self,", "= inp2 if isinstance(unit2, np.ndarray): if isinstance(unit2, YTArray): if unit2.units.is_dimensionless:", "of the array. Powers must be specified using python syntax", "the default one will be used. NOTE: This is not", "return_without_unit, true_divide: divide_units, floor_divide: divide_units, negative: passthrough_unit, power: power_unit, remainder:", "__add__. \"\"\" oth = sanitize_units_add(self, other, \"addition\") np.add(self, oth, out=self)", "str(state[0]), default_unit_symbol_lut.copy() # need to fix up the lut if", "right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__truediv__(ro) def __rtruediv__(self,", "system. Parameters ---------- unit_system : string, optional The unit system", "self.units = getattr(obj, 'units', NULL_UNIT) def __pos__(self): \"\"\" Posify the", "to convert from. 
unit_registry : Pint UnitRegistry, optional The Pint", "convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs, Rational(exponent))) p_units = \"*\".join(p_units) if isinstance(arr.magnitude,", "else: g = f if dataset_name in g.keys(): d =", "logical_xor, logical_not, maximum, minimum, fmax, fmin, \\ isreal, iscomplex, isfinite,", "to significant speedups in the input validation logic adds significant", "file unit, lut = str(state[0]), default_unit_symbol_lut.copy() # need to fix", "ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__truediv__(ro) def __rtruediv__(self, left_object):", "type(self)(self.ndview * conversion_factor, new_units) if offset: np.subtract(new_array, offset*new_array.uq, new_array) return", "\"\"\"Matrix or vector dot product that preserves units This is", "= None f.close() registry = UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data, units,", "= Unit(registry=getattr(unit2, 'registry', None)) if unit2 is None and ufunc", "expected by e.g. ``yt.loadtxt``. Examples -------- >>> sp = ds.sphere(\"c\",", "out_arr, context=None): ret = super(YTArray, self).__array_wrap__(out_arr, context) if isinstance(ret, YTQuantity)", "axis=None, dtype=None, out=None): if axis is not None: units =", "under the terms of the Modified BSD License. # #", "cls1): return cls2 else: raise RuntimeError(\"Undefined operation for a YTArray", "return v def uhstack(arrs): \"\"\"Stack arrays in sequence horizontally (column", "# Make sure the other object is a YTArray before", "the equivalency, which should be used if that particular equivalency", "# numpy 1.13 or newer from numpy import positive, divmod", "if out is not None: out_orig[0].flat[:] = out.flat[:] if isinstance(out_orig[0],", "have the same units. See the documentation of numpy.intersect1d for", "columns to read, with 0 being the first. 
For example,", "if isinstance(new_arr, tuple): try: return type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError: raise", "from the metadata extracted in __reduce__ and then serialized by", "(np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls2, (numeric_type, np.number, list, tuple)): return", "Nothing provided. Make dimensionless... units = Unit() elif isinstance(input_units, Unit):", "\"\"\" # this needs to be defined for all numpy", "Examples -------- >>> from pint import UnitRegistry >>> import numpy", "\"\"\" See __mul__. \"\"\" oth = sanitize_units_mul(self, other) np.multiply(self, oth,", "YTArray or YTQuantity has an equivalent unit in *equiv*. \"\"\"", "appropriate. >>> import numpy as np >>> a = YTQuantity(12,", "by the object on the right of the `/` operator.", "Examples -------- >>> from yt import YTQuantity >>> a =", "string The units you want to get a new quantity", "tanh: return_without_unit, arcsin: return_without_unit, arccos: return_without_unit, arctan: return_without_unit, arctan2: arctan2_unit,", "being multiplied. \"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__mul__(ro)", "return super(YTArray, self).__xor__(right_object) def __rxor__(self, left_object): return super(YTArray, self).__rxor__(left_object) def", "equivalence=None, **kwargs): \"\"\" Creates a copy of this array with", "import print_function #----------------------------------------------------------------------------- # Copyright (c) 2013, yt Development Team.", "return super(YTArray, self).__rxor__(left_object) def __ixor__(self, other): np.bitwise_xor(self, other, out=self) return", "to write to append as attributes to the dataset. group_name:", "\"\"\" Return a YTArray with the abs of the data.", "annoying to deal with them. 
>>> print(np.log10(a)) 1.07918124605 YTQuantity is", "defines # hour as \"h\" if unit_str == \"h\": unit_str", "any([u1d, u2d]): if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) else: if", "if usecols is not None: units = [units[col] for col", ": string, optional The unit system to be used in", "if unit_operator in (comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps,", "def mean(self, axis=None, dtype=None, out=None): return super(YTArray, self).mean(axis, dtype, out),", "\\n\" \"ds.arr(%s, \\\"%s\\\")\" % (input_array, input_units) ) if isinstance(input_array, YTArray):", "preserve_units: inps, units = handle_preserve_units( inps, units, ufunc, ret_class) unit", "obj = np.asarray(input_array, dtype=dtype).view(cls) # Check units type if input_units", "'g/cm**3') >>> np.abs(a) YTArray([4, 3, 2, 1, 0, 1, 2,", "= sanitize_units_add(self, other, \"subtraction\") np.subtract(self, oth, out=self) return self def", "if not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)): raise RuntimeError(\"YTQuantity values must", "B = yt.YTArray([2, 3, 4], 'cm') >>> uconcatenate((A, B)) YTArray([", "other, \"subtraction\") np.subtract(self, oth, out=self) return self def __neg__(self): \"\"\"", "methods # def convert_to_units(self, units): \"\"\" Convert the array and", "at the top level by default. \"\"\" import h5py from", "the object on the right. \"\"\" # Check that the", "and this is specified, this will be used instead of", "because AstroPy is silly and defines # hour as \"h\"", "= YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>> np.all(a == b) True \"\"\"", "a YTArray with the abs of the data. \"\"\" return", "nextafter, \\ modf, ldexp, frexp, fmod, floor, ceil, trunc, fabs,", "for the correct (same dimension) units. 
\"\"\" ro = sanitize_units_add(self,", "registry = UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data, units, registry=registry) # #", "fmt='%.18e', delimiter='\\t', header='', footer='', comments='#'): r\"\"\" Write YTArrays with unit", "Distributed under the terms of the Modified BSD License. #", "the data in the specified unit system, and returns it", "not_equal, equal, logical_and, \\ logical_or, logical_xor, logical_not, maximum, minimum, fmax,", "= arr1.units * arr2.units arr = YTArray(v, units, registry=registry) return", "convert_to_mks(self): \"\"\" Convert the array and units to the equivalent", "def array_like_field(data, x, field): field = data._determine_fields(field)[0] if isinstance(field, tuple):", "UnitParseError from yt.units.unit_registry import UnitRegistry from yt.units.dimensions import \\ angle,", "True, all input validation is skipped. Using this option may", "them. Parameters ---------- units : Unit object or string, optional", "def validate_numpy_wrapper_units(v, arrs): if not any(isinstance(a, YTArray) for a in", "registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions, None) if equiv_dims == other_units.dimensions: if", "= validate_comparison_units(self, other, 'equal') return super(YTArray, self).__eq__(oth) def __ne__(self, other):", "np.ndarray)): raise RuntimeError(\"YTQuantity values must be numeric\") ret = YTArray.__new__(cls,", "so it can't be None. return False oth = validate_comparison_units(self,", "YTArray subclass. 
\" \"Received operand types (%s) and (%s)\" %", "an equivalence can be specified to convert to an equivalent", "inside pickle.read() and restores the unit data from the metadata", "the array data.\"\"\" return self.ndarray_view() d = ndview @property def", "= sp[\"temperature\"] >>> c = sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My", "memodict=None): \"\"\"copy.deepcopy implementation This is necessary for stdlib deepcopy of", "dimensionless or filled with zeros if not inp.units.is_dimensionless and np.any(ret):", "to some power. Parameters ---------- power : float or dimensionless", "units : Unit object or string The units you want", "= UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data, units, registry=registry) # # Start", "the right. \"\"\" # Check that other is a YTArray.", "unit specification, unit symbol object, or astropy units The units", "= validate_numpy_wrapper_units(v, [arr1, arr2]) return v def unorm(data, ord=None, axis=None,", "handle_preserve_units( inps, units, ufunc, ret_class) unit = unit_operator(*units) out_arr =", "return_without_unit, arcsin: return_without_unit, arccos: return_without_unit, arctan: return_without_unit, arctan2: arctan2_unit, arcsinh:", "return_without_unit, arctan: return_without_unit, arctan2: arctan2_unit, arcsinh: return_without_unit, arccosh: return_without_unit, arctanh:", "YTArray # with a unit attribute set to the sympy", "units = self.units**self.shape[axis] else: units = self.units**self.size return super(YTArray, self).prod(axis,", "is silly and defines # hour as \"h\" if unit_str", "unary_operators = ( negative, absolute, rint, sign, conj, exp, exp2,", "= equivalence_registry[equiv]() oneway_or_equivalent = ( conv_unit.has_equivalent(equiv) or this_equiv._one_way) if self.has_equivalent(equiv)", "element of the returned tuple, itself a tuple used to", "**kwargs) @classmethod def from_pint(cls, arr, unit_registry=None): \"\"\" Convert a Pint", "# under consideration, 
convert so we don't mix units with", "written at the beginning of the file, before the unit", "\\ add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide, \\ floor_divide,", "if 'out' in kwargs: out_orig = kwargs.pop('out') out = np.asarray(out_orig[0])", "unit information stripped \"\"\" return np.array(self) @classmethod def from_astropy(cls, arr,", "object, or astropy units The units of the array. Powers", "method, *inputs, **kwargs): func = getattr(ufunc, method) if 'out' in", "inputs, out_arr) unit = self._ufunc_registry[context[0]](u) ret_class = type(self) elif ufunc", "Posify the data. \"\"\" # this needs to be defined", "v = np.stack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def", ": str, optional String or character separating columns. header :", "# The full license is in the file COPYING.txt, distributed", "input_object def sanitize_units_mul(this_object, other_object): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object)", "object. 
# don't handle all the cases here, let the", "be \" \"dimensionless!\") units = [\"dimensionless\"]*num_cols arrays = np.loadtxt(fname, dtype=dtype,", "+ ' ' + str(self.units) # # Start unit conversion", "or newer than 1.13 def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):", "found, uses np.float64 bypass_validation : boolean If True, all input", "ret_class def handle_preserve_units(inps, units, ufunc, ret_class): if units[0] != units[1]:", "+ b 201.0 cm >>> b + a 2.01 m", "1.07918124605 YTQuantity is tightly integrated with yt datasets: >>> import", "unit_array def __getitem__(self, item): ret = super(YTArray, self).__getitem__(item) if ret.shape", "import \\ default_unit_symbol_lut from yt.units.equivalencies import equivalence_registry from yt.utilities.logger import", "units, out, out_arr) else: raise RuntimeError( \"Support for the %s", "(comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps, units, ufunc, ret_class)", "This is a wrapper around np.stack that preserves units. \"\"\"", "we serialized the unit symbol lookup table # into the", "super(YTArray, self).prod(axis, dtype, out), units @return_arr def mean(self, axis=None, dtype=None,", "np.any(this_object): return ret raise YTUnitOperationError(op_string, inp.units, ret.units) ret = ret.in_units(inp.units)", "finfo.units if isinstance(x, YTArray): arr = copy.deepcopy(x) arr.convert_to_units(units) return arr", "True \"\"\" def __new__(cls, input_scalar, input_units=None, registry=None, dtype=np.float64, bypass_validation=False): if", "avoids creating a YTQuantity with # size > 1 out_arr", "out_arr def copy(self, order='C'): return type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self, obj):", "or equal to other. 
\"\"\" # Check that the other", "= Unit(str(input_array.units), registry=registry) ret.units = units elif isinstance(input_units, Unit): ret.units", "def list_equivalencies(self): \"\"\" Lists the possible equivalencies associated with this", "unit**-1 def passthrough_unit(unit, unit2=None): return unit def return_without_unit(unit, unit2=None): return", "the input data, or, if none is found, uses np.float64", "in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls2, (numeric_type, np.number, list, tuple)):", "'') if 'unit_registry' in dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut", "correct (same dimension) units. \"\"\" ro = sanitize_units_add(self, right_object, \"addition\")", "for line in f.readlines(): words = line.strip().split() if len(words) ==", "in (multiply, divide) and method == 'reduce': power_sign = POWER_SIGN_MAPPING[ufunc]", "create and write a dataset to dataset_name: string The name", "0. , 0.30103 , 0.47712125, 0.60205999, 0.69897 , 0.77815125, 0.84509804])", "1: return YTQuantity(np.array(out_arr), unit) else: if ret_class is YTQuantity: #", "multiply, divide, logaddexp, logaddexp2, true_divide, \\ floor_divide, negative, power, remainder,", "and (%s)\" % (cls1, cls2)) def loadtxt(fname, dtype='float', delimiter='\\t', usecols=None,", "getattr(inp1, 'units', None) unit2 = getattr(inp2, 'units', None) ret_class =", "not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units) return other.in_units(this.units) return other", "None: unit = u**(power_sign*inp.shape[kwargs['axis']]) else: unit = u**(power_sign*inp.size) else: unit", "validate_comparison_units(self, other, 'equal') return super(YTArray, self).__eq__(oth) def __ne__(self, other): \"\"\"", "all the cases here, let the Unit class handle if", "arrs): if not any(isinstance(a, YTArray) for a in arrs): return", ">>> import yt >>> ds = 
yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a =", "iterable(obj): try: len(obj) except: return False return True def return_arr(func):", "\"\"\" \"\"\" return str(self.view(np.ndarray)) + ' ' + str(self.units) #", "if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) else: if raise_error: raise", "copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\" A scalar associated with a unit.", "def __rfloordiv__(self, left_object): \"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self,", "LooseVersion('1.10.0'): norm = np.linalg.norm(data, ord=ord, axis=axis) else: norm = np.linalg.norm(data,", "not None: units = self.units**self.shape[axis] else: units = self.units**self.size return", "class handle if # it's a str. units = Unit(input_units,", "The units you want to convert to. \"\"\" new_units =", "ufunc, ret_class) elif unit_operator is preserve_units: inps, units = handle_preserve_units(", "out=None): return super(YTArray, self).sum(axis, dtype, out), self.units @return_arr def std(self,", "h5py.File(filename) if group_name is not None: g = f[group_name] else:", "is a YTArray. if hasattr(other, 'units'): if this.units.expr is other.units.expr:", "in (preserve_units, comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps, units,", "unit_system : string, optional The unit system to be used", "is None: unit_registry = UnitRegistry() powers_dict = self.units.expr.as_powers_dict() units =", "\"\"\" v = np.vstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v", "a in arrs): raise RuntimeError(\"Not all of your arrays are", "data converted to cgs units. \"\"\" return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self):", "self).__rfloordiv__(lo) def __ifloordiv__(self, other): \"\"\" See __div__. \"\"\" oth =", "function for details. 
\"\"\" return self.in_units(units, equivalence=equivalence, **kwargs) def to_value(self,", "other, out=self) return self def __and__(self, right_object): return super(YTArray, self).__and__(right_object)", "str, optional The character used to indicate the start of", "unit) if out is not None: out_orig[0].flat[:] = out.flat[:] if", "to. arrays : list of YTArrays or single YTArray The", "The units of the array. Powers must be specified using", "equal, logical_and, logical_or, logical_xor, maximum, minimum, fmax, fmin, copysign, nextafter,", "b + a 2.01 m NumPy ufuncs will pass through", "ro = sanitize_units_add(self, right_object, \"addition\") return super(YTArray, self).__add__(ro) def __radd__(self,", "yt.units.dimensions import \\ angle, \\ current_mks, \\ dimensionless, \\ em_dimensions", "The scalar to attach units to input_units : String unit", "passthrough_unit, spacing: passthrough_unit, positive: passthrough_unit, divmod_: passthrough_unit, isnat: return_without_unit, heaviside:", "documentation of numpy.concatenate for full details. Examples -------- >>> A", "dot.shape == (): return YTQuantity(dot, units) return YTArray(dot, units) def", "the dataset. group_name: string An optional group to write the", "ytarray to the object on the right of the `+`", "arrays must have the same units. See the documentation of", "pickle. \"\"\" super(YTArray, self).__setstate__(state[1:]) try: unit, lut = state[0] except", "return ret ufunc = context[0] inputs = context[1] if ufunc", "ret, units = func(*args, **kwargs) if ret.shape == (): return", "data, but can lead to significant speedups in the input", "return input_array.view(cls) if registry is None and isinstance(input_units, (str, bytes)):", "must be numeric\") ret = YTArray.__new__(cls, input_scalar, input_units, registry, dtype=dtype,", "with %i inputs has not been\" \"added to YTArray.\" %", "\"\"\"Stack arrays in sequence horizontally (column wise) while preserving units", "equal to the object on the right. 
\"\"\" oth =", "that preserves units. All input arrays must have the same", "left_object) return super(YTArray, self).__rdiv__(lo) def __idiv__(self, other): \"\"\" See __div__.", "return super(YTArray, self).__rtruediv__(lo) def __itruediv__(self, other): \"\"\" See __div__. \"\"\"", "into the array, but as an ndarray rather than ytarray.", "ret_class(np.array(out_arr, copy=False), unit) else: # numpy version equal to or", "to the file. fmt : str or sequence of strs,", "to convert to. equiv : string The equivalence you wish", "units to the equivalent base units in the specified unit", "def ustack(arrs, axis=0): \"\"\"Join a sequence of arrays along a", "other): \"\"\" Test if this is not equal to the", "3, 2, 1, 0, 1, 2, 3]) g/cm**3 and strip", "dimension. This is a wrapper around np.stack that preserves units.", "to attach units to input_units : String unit specification, unit", "hypot: preserve_units, deg2rad: return_without_unit, rad2deg: return_without_unit, bitwise_and: bitop_units, bitwise_or: bitop_units,", "it is compatible with this quantity. Returns Unit object. \"\"\"", "= sanitize_units_mul(self, right_object) return super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self, left_object): \"\"\"", "read. comments : str, optional The character used to indicate", "\"subtraction\") return super(YTArray, self).__rsub__(lo) def __isub__(self, other): \"\"\" See __sub__.", "E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\" Creates a copy", "from pint import UnitRegistry >>> import numpy as np >>>", "units is None: v = self.value else: v = self.in_units(units,", "not supplied, the default one will be used. Examples --------", "as divmod_, isnat, heaviside except ImportError: positive, divmod_, isnat, heaviside", "d.dtype == self.dtype: d[...] 
= self for k in d.attrs.keys():", "return type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\" A scalar associated with", "unit, lut = str(state[0]), default_unit_symbol_lut.copy() # need to fix up", "object on the right of the `+` operator. Must check", "0.69897 , 0.77815125, 0.84509804]) YTArray is tightly integrated with yt", "axisb=axisb, axisc=axisc, axis=axis) units = arr1.units * arr2.units arr =", "self).__xor__(right_object) def __rxor__(self, left_object): return super(YTArray, self).__rxor__(left_object) def __ixor__(self, other):", "We first cast to be our class type obj =", "attributes to the dataset. group_name: string An optional group to", "passthrough_unit, power: power_unit, remainder: preserve_units, mod: preserve_units, fmod: preserve_units, absolute:", "optional group to read the arrays from. If not specified,", "4, 'g/cm**3') >>> np.abs(a) YTArray([4, 3, 2, 1, 0, 1,", "UnitRegistry from yt.units.dimensions import \\ angle, \\ current_mks, \\ dimensionless,", "do this, super(YTArray, self).__pow__ returns a YTArray # with a", "B = yt.YTArray([2, 3, 4], 'cm') >>> uunion1d(A, B) YTArray([", "supplied, the default one will be used. NOTE: This is", "typed=False) def multiply_units(unit1, unit2): return unit1 * unit2 def preserve_units(unit1,", "= Unit(other_units, registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions, None) if equiv_dims ==", "is a wrapper around np.vstack that preserves units. \"\"\" v", "YTArray.in_units(). See the docstrings of that function for details. \"\"\"", "beginning of line: \\\"%s\\\".\" % line[0]) f.close() if len(units) !=", "To see which equivalencies are supported for this unitful quantity,", "arrays. A wrapper around numpy.intersect1d that preserves units. 
All input", "\"to YTArray.\" % str(context[0])) if unit is None: out_arr =", "isreal, iscomplex, isfinite, isinf, isnan, signbit, copysign, nextafter, \\ modf,", "_astropy.units is None: raise ImportError(\"You don't have AstroPy installed, so", "left_object): return super(YTArray, self).__rand__(left_object) def __iand__(self, other): np.bitwise_and(self, other, out=self)", "scalar associated with a unit. Parameters ---------- input_scalar : an", "def __pow__(self, power): \"\"\" Raise this YTArray to some power.", "not been added \" \"to YTArray.\" % str(context[0])) if unit", "'cm') >>> b = YTArray([4, 5, 6], 'm') >>> a", "or string, optional The units you want to get the", "np.asarray(input_array, dtype=dtype).view(cls) # Check units type if input_units is None:", "using python syntax (cm**3, not cm^3). registry : A UnitRegistry", "velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\" f = open(fname, 'r')", "'unit_registry' in dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut = None", "'out' in kwargs: out_orig = kwargs.pop('out') out = np.asarray(out_orig[0]) else:", "np.random.random(10) >>> b = ureg.Quantity(a, \"erg/cm**3\") >>> c = yt.YTArray.from_pint(b)", "__ifloordiv__(self, other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other)", "want to convert to. \"\"\" new_units = _unit_repr_check_same(self.units, units) (conversion_factor,", "AstroPy \"Quantity\" to a YTArray or YTQuantity. Parameters ---------- arr", "out is not None: out_orig[0].flat[:] = out.flat[:] if isinstance(out_orig[0], YTArray):", "v = self.value else: v = self.in_units(units, equivalence=equivalence, **kwargs).value if", "units to the equivalent mks units. 
\"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def", "if registry and registry is not input_units.registry: units = Unit(str(input_units),", "units): \"\"\" Convert the array and units to the given", "the same unit and shape as this array\"\"\" return np.ones_like(self)", "symbol object, or astropy units The units of the array.", "oth = sanitize_units_mul(self, other) np.multiply(self, oth, out=self) return self def", "__new__(cls, input_scalar, input_units=None, registry=None, dtype=np.float64, bypass_validation=False): if not isinstance(input_scalar, (numeric_type,", "= self._ufunc_registry[context[0]] inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator", "\"\"\" # Check that the other is a YTArray. oth", "that the other is a YTArray. oth = validate_comparison_units(self, other,", "axisa=axisa, axisb=axisb, axisc=axisc, axis=axis) units = arr1.units * arr2.units arr", "in d.attrs.keys(): del d.attrs[k] else: del f[dataset_name] d = g.create_dataset(dataset_name,", "def iterable(obj): try: len(obj) except: return False return True def", "in the dimensions of the result. For example, if ``axis=0``", "def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation This is necessary for stdlib", "if ret.size > 1: raise RuntimeError(\"YTQuantity instances must be scalars\")", "def __le__(self, other): \"\"\"Test if this is less than or", "can get away with it. if d.shape == self.shape and", "numpy as np >>> ureg = UnitRegistry() >>> a =", "All additional keyword arguments are passed to the equivalency, which", "prod(self, axis=None, dtype=None, out=None): if axis is not None: units", "if unit is None: out_arr = np.array(out_arr, copy=False) return out_arr", "the top level by default. 
Examples -------- >>> a =", "'}') registry = UnitRegistry(lut=lut, add_default_symbols=False) self.units = Unit(unit, registry=registry) def", "---------- input_array : :obj:`!iterable` A tuple, list, or array to", "\"\"\" return self.in_units(units, equivalence=equivalence, **kwargs) def to_value(self, units=None, equivalence=None, **kwargs):", "np.ma.masked_array) or issubclass(cls2, (numeric_type, np.number, list, tuple)): return cls1 if", "a new axis while preserving units The axis parameter specifies", "raise YTUnitOperationError(ufunc, unit1, unit2) unit2 = 1.0 return (inp1, inp2),", "units = [] for unit, pow in powers_dict.items(): # we", "cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, \\ bitwise_and, bitwise_or,", "offset*self.uq, self) return self def convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert the", "= g[dataset_name] data = dataset[:] units = dataset.attrs.get('units', '') if", "Test if this is greater than or equal to other.", "None if len(inputs) == 1: _, inp, u = get_inp_u_unary(ufunc,", "of cgs are used. Examples -------- >>> E = YTQuantity(2.5,", "tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, invert, logical_not, isreal, iscomplex,", "list): arrays = [arrays] units = [] for array in", "to other. \"\"\" # Check that the other is a", "point scalar The scalar to attach units to input_units :", "in the iterable. 
return YTArray(input_object) return input_object else: return input_object", "be written at the beginning of the file, before the", "consideration, convert so we don't mix units with the same", "for o in out_arr)) elif out_arr.size == 1: out_arr =", "The character used to indicate the start of a comment;", "cPickle as pickle if dataset_name is None: dataset_name = 'array_data'", "def __or__(self, right_object): return super(YTArray, self).__or__(right_object) def __ror__(self, left_object): return", "ds.quan(5, 'code_length') >>> a.in_cgs() 1.543e+25 cm This is equivalent to:", "equivalence can be specified to convert to an equivalent quantity", "if equivalence is None: new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset)", "expression 1/1 rather than # a dimensionless Unit object. if", "operators. # def __lt__(self, other): \"\"\" Test if this is", "import yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.arr(np.ones(5),", "@property def unit_array(self): \"\"\"Get a YTArray filled with ones with", "preserve_units, minimum: preserve_units, fmax: preserve_units, fmin: preserve_units, isreal: return_without_unit, iscomplex:", "in. equivalence : string, optional The equivalence you wish to", "Pint Quantity. Parameters ---------- arr : YTArray or YTQuantity The", "any([ff != getattr(_, 'units', NULL_UNIT) for _ in input_object]): raise", "state of the ndarray. This is always defined for numpy", "a YTArray and has the same dimensions as the object", ">>> b = YTQuantity(5, 'code_length', registry=ds.unit_registry) >>> np.all(a == b)", "conv_unit = Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv =", "a = ds.arr(np.ones(5), 'code_length') >>> a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24,", "super(YTArray, self).__eq__(oth) def __ne__(self, other): \"\"\" Test if this is", "use. 
To see which equivalencies are supported for this unitful", "group to read the arrays from. If not specified, the", "= [] for base, exponent in zip(u.bases, u.powers): unit_str =", "None Returns ------- NumPy array \"\"\" if units is None:", "This is always defined for numpy arrays. \"\"\" np_ret =", "The dtype of the array data. Defaults to the dtype", "specified to convert to an equivalent quantity which is not", "# let Unit() handle units arg if it's not already", "unit_operator(*units) if unit_operator in (multiply_units, divide_units): out_arr, out_arr, unit =", "header. footer : str, optional String that will be written", "YTUnitOperationError(ufunc, *units) else: if raise_error: raise YTUfuncUnitError(ufunc, *units) inps =", "ret_class, raise_error=False): if units[0] != units[1]: u1d = units[0].is_dimensionless u2d", ">>> a = ds.quan(5, 'code_length') >>> a.in_cgs() 1.543e+25 cm This", "right_object): return super(YTArray, self).__and__(right_object) def __rand__(self, left_object): return super(YTArray, self).__rand__(left_object)", "footer='', comments='#'): r\"\"\" Write YTArrays with unit information to a", "to be defined for all numpy versions, see # numpy", "np >>> a = YTArray(np.arange(8) - 4, 'g/cm**3') >>> np.abs(a)", "new_arr = this_equiv.convert( self, conv_unit.dimensions, **kwargs) if isinstance(new_arr, tuple): try:", "lead to significant speedups in the input validation logic adds", "def sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128, typed=False) def multiply_units(unit1, unit2): return", "maximum: preserve_units, minimum: preserve_units, fmax: preserve_units, fmin: preserve_units, isreal: return_without_unit,", "if it's not already a Unit obj. if not isinstance(other_units,", "the correct (same dimension) units. \"\"\" ro = sanitize_units_add(self, right_object,", "header. 
Arrays will be \" \"dimensionless!\") units = [\"dimensionless\"]*num_cols arrays", "super(YTArray, self).__add__(ro) def __radd__(self, left_object): \"\"\" See __add__. \"\"\" lo", "do this because AstroPy is silly and defines # hour", "sanitize_units_mul(self, other) np.multiply(self, oth, out=self) return self def __div__(self, right_object):", "YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache import lru_cache from numbers import Number", "not isinstance(arrays, list): arrays = [arrays] units = [] for", "(inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_multiply_divide_units(unit, units, out,", "is not supplied, the default one will be used. NOTE:", "(inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_comparison_units(inps, units, ufunc,", "right of the `-` from this ytarray. Must check for", "YTQuantity has an equivalent unit in *equiv*. \"\"\" return self.units.has_equivalent(equiv)", "issubclass(cls1, cls2): return cls1 if issubclass(cls2, cls1): return cls2 else:", "\"\"\" return self.view(np.ndarray) def to_ndarray(self): \"\"\" Creates a copy of", "RuntimeError(\"YTQuantity values must be numeric\") ret = YTArray.__new__(cls, input_scalar, input_units,", "that it is compatible with this quantity. Returns Unit object.", "b) True \"\"\" def __new__(cls, input_scalar, input_units=None, registry=None, dtype=np.float64, bypass_validation=False):", "None: if ret.shape == (): return ret[()] else: return ret", "with the unit object. dtype : data-type The dtype of", "object with data converted to cgs units. \"\"\" return self.in_units(self.units.get_cgs_equivalent())", "equivalent to: >>> b = YTQuantity(5, 'code_length', registry=ds.unit_registry) >>> np.all(a", "units. equivalence : string, optional The equivalence you wish to", "for numpy arrays. 
\"\"\" np_ret = super(YTArray, self).__reduce__() obj_state =", "dtype is None: dtype = getattr(input_array, 'dtype', np.float64) if bypass_validation", "should be used if that particular equivalency requires them. Parameters", "the resulting array; default: float. delimiter : str, optional The", "= ds.arr(np.ones(5), 'code_length') >>> a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24,", "input_units) ) if isinstance(input_array, YTArray): ret = input_array.view(cls) if input_units", "return unit*unit @lru_cache(maxsize=128, typed=False) def divide_units(unit1, unit2): return unit1/unit2 @lru_cache(maxsize=128,", "log2: return_without_unit, log10: return_without_unit, expm1: return_without_unit, log1p: return_without_unit, sqrt: sqrt_unit,", "comparison_unit, logical_xor: comparison_unit, logical_not: return_without_unit, maximum: preserve_units, minimum: preserve_units, fmax:", "not defined for YTArray instances\") def get_inp_u_unary(ufunc, inputs, out_arr=None): inp", "deleting if we can get away with it. if d.shape", "np.any(other_object): return ret.view(np.ndarray) elif not np.any(this_object): return ret raise YTUnitOperationError(op_string,", "import UnitRegistry from yt.units.dimensions import \\ angle, \\ current_mks, \\", "3.08600000e+24, 3.08600000e+24]) cm This is equivalent to: >>> b =", "issubclass(cls2, YTQuantity): return cls1 if issubclass(cls1, cls2): return cls1 if", "return input_object if iterable(input_object): if any([isinstance(o, YTArray) for o in", "See the documentation of numpy.intersect1d for full details. Examples --------", "v def uhstack(arrs): \"\"\"Stack arrays in sequence horizontally (column wise)", "object on the right. \"\"\" # Check that the other", "return super(YTArray, self).__truediv__(ro) def __rtruediv__(self, left_object): \"\"\" See __div__. \"\"\"", "+ str(self.units) # # Start unit conversion methods # def", ": boolean If True, all input validation is skipped. 
Using", "Examples -------- >>> E = YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\"", "return np.array(self) v = value @property def ndview(self): \"\"\"Get a", "= False units = [] num_cols = -1 for line", "[] num_cols = -1 for line in f.readlines(): words =", "with # size > 1 out_arr = YTArray(np.asarray(out_arr), unit) else:", "not supplied, the default one will be used. NOTE: This", "have to do this because Pint doesn't recognize # \"yr\"", "if this is less than the object on the right.", "units, and returns it. Returns ------- Quantity object with data", "*units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def", "input_units=None, registry=None, dtype=None, bypass_validation=False): if dtype is None: dtype =", "return cls(data, units, registry=registry) # # Start convenience methods #", "tuple([YTArray(arr, unit) for arr, unit in zip(arrays, units)]) def savetxt(fname,", "added \" \"to YTArray.\" % str(context[0])) if unit is None:", "arr.unit ap_units = [] for base, exponent in zip(u.bases, u.powers):", "inps, units = handle_comparison_units( inps, units, ufunc, ret_class, raise_error=True) unit", "return ret def sanitize_units_add(this_object, other_object, op_string): inp = coerce_iterable_units(this_object) ret", "'cm') >>> myinfo = {'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ...", "Start comparison operators. # def __lt__(self, other): \"\"\" Test if", "== np.bool_(False): units = (units[0], units[0]) else: if not units[0].same_dimensions_as(units[1]):", "ufunc in unary_operators: out_arr, inp, u = get_inp_u_unary(ufunc, inputs, out_arr)", "tuple): finfo = data.ds._get_field_info(field[0],field[1]) else: finfo = data.ds._get_field_info(field) if finfo.sampling_type", "not already a Unit obj. 
if not isinstance(other_units, Unit): other_units", "my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units, my_units.dimensions, other_units, other_units.dimensions) return other_units unary_operators", "\"Support for the %s ufunc has not been added \"", "integer or floating point scalar The scalar to attach units", "is not supplied, the default one will be used. Examples", "RuntimeError( \"Support for the %s ufunc with %i inputs has", "None: u = NULL_UNIT if u.dimensions is angle and ufunc", "out_arr, unit def coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray): return input_object if", "return unit1 * unit2 def preserve_units(unit1, unit2=None): return unit1 @lru_cache(maxsize=128,", "\"\"\" Convert the array and units to the equivalent mks", "optional The unit system to be used in the conversion.", "issubclass(cls1, YTQuantity): return cls2 if issubclass(cls2, YTQuantity): return cls1 if", "v def in_base(self, unit_system=\"cgs\"): \"\"\" Creates a copy of this", "in the conversion. If one is not supplied, the default", "is None: out_arr = np.array(out_arr, copy=False) return out_arr out_arr.units =", "obj.units.registry = registry return obj if input_array is NotImplemented: return", "= context[0] inputs = context[1] if ufunc in unary_operators: out_arr,", "or vector dot product that preserves units This is a", "square, reciprocal, sin, cos, tan, arcsin, arccos, arctan, sinh, cosh,", "divmod_: passthrough_unit, isnat: return_without_unit, heaviside: preserve_units, } __array_priority__ = 2.0", "__imul__(self, other): \"\"\" See __mul__. \"\"\" oth = sanitize_units_mul(self, other)", "convert to. 
equiv : string The equivalence you wish to", "u**(power_sign*inp.size) else: unit = self._ufunc_registry[ufunc](u) ret_class = type(self) elif len(inputs)", "be prepended to the ``header`` and ``footer`` strings, to mark", "def udot(op1, op2): \"\"\"Matrix or vector dot product that preserves", ">>> temp, velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\" f =", "else: units = input_units else: # units kwarg set, but", "Unit(str(input_array.units), registry=registry) ret.units = units elif isinstance(input_units, Unit): ret.units =", "will be written at the end of the file. comments", "= (None,)*4 from yt.units.unit_object import Unit, UnitParseError from yt.units.unit_registry import", "registry=registry) else: units = input_units else: # units kwarg set,", "-1 for line in f.readlines(): words = line.strip().split() if len(words)", "standard library pickle module: http://docs.python.org/2/library/pickle.html Unit metadata is encoded in", "other): \"\"\"Test if this is less than or equal to", "two arrays. A wrapper around numpy.intersect1d that preserves units. All", "d = ndview @property def unit_quantity(self): \"\"\"Get a YTQuantity with", "ret = coerce_iterable_units(other_object) # Make sure the other object is", "mod, arctan2, hypot, bitwise_and, bitwise_or, bitwise_xor, left_shift, right_shift, greater, greater_equal,", "a = YTArray(np.arange(8) - 4, 'g/cm**3') >>> np.abs(a) YTArray([4, 3,", "if ret_class is YTQuantity: # This happens if you do", "Pint UnitRegistry to use in the conversion. 
If one is", "== 'particle': units = finfo.output_units else: units = finfo.units if", "left_object): return super(YTArray, self).__rxor__(left_object) def __ixor__(self, other): np.bitwise_xor(self, other, out=self)", "num_cols = len(col_words) break except ValueError: mylog.warning(\"Unrecognized character at beginning", "bitop_units, bitwise_or: bitop_units, bitwise_xor: bitop_units, invert: invert_units, left_shift: bitop_units, right_shift:", "arr def uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find the sorted unique elements", "input_scalar, input_units, registry, dtype=dtype, bypass_validation=bypass_validation) if ret.size > 1: raise", "return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\" Creates a copy of this", "we catch the first line of numbers try: col_words =", "return super(YTArray, self).__rdiv__(lo) def __idiv__(self, other): \"\"\" See __div__. \"\"\"", "numpy arrays. \"\"\" np_ret = super(YTArray, self).__reduce__() obj_state = np_ret[2]", "col_words = line.strip().split(delimiter) for word in col_words: float(word) num_cols =", "raise RuntimeError(\"YTQuantity instances must be scalars\") return ret def __repr__(self):", "a YTArray to hdf5 file. Parameters ---------- filename: string The", "bitwise_and: bitop_units, bitwise_or: bitop_units, bitwise_xor: bitop_units, invert: invert_units, left_shift: bitop_units,", "if ufunc in (multiply, divide) and method == 'reduce': power_sign", "a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... info=myinfo) \"\"\" from yt.utilities.on_demand_imports import _h5py as", "= units return obj def __repr__(self): \"\"\" \"\"\" return super(YTArray,", "return_without_unit(unit, unit2=None): return None def arctan2_unit(unit1, unit2): return NULL_UNIT def", "the registry associated with the unit object. 
dtype : data-type", "greater, greater_equal, less, less_equal, not_equal, equal, logical_and, logical_or, logical_xor, maximum,", "or array data, but can lead to significant speedups in", "unit) for o in out_arr)) elif out_arr.size == 1: out_arr", "unit that you wish to convert to. equiv : string", "multiply, divide, logaddexp, logaddexp2, true_divide, power, remainder, mod, arctan2, hypot,", "super(YTArray, self).__rdiv__(lo) def __idiv__(self, other): \"\"\" See __div__. \"\"\" oth", ">>> B = yt.YTArray([2, 3, 4], 'cm') >>> uunion1d(A, B)", "= _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview", "the other object is not a YTArray, then one of", "\"\"\" v = np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis) units", "the dataset to read from. If the dataset has a", "\"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\") if header != '': header +=", "sanitize_units_mul(self, right_object) return super(YTArray, self).__mul__(ro) def __rmul__(self, left_object): \"\"\" See", "additional keyword arguments are passed to the equivalency, which should", "return_without_unit, maximum: preserve_units, minimum: preserve_units, fmax: preserve_units, fmin: preserve_units, isreal:", "Returns ------- YTArray \"\"\" if equivalence is None: new_units =", "oth, out=self) return self def __sub__(self, right_object): \"\"\" Subtract the", "= (units[0], units[0]) elif not any([u1d, u2d]): if not units[0].same_dimensions_as(units[1]):", "\"\"\" \"\"\" return super(YTArray, self).__repr__()+' '+self.units.__repr__() def __str__(self): \"\"\" \"\"\"", "return v def uunion1d(arr1, arr2): \"\"\"Find the union of two", "a view into the array, but as an ndarray rather", "\"\"\" An ndarray subclass that attaches a symbolic unit object", "other) np.true_divide(self, oth, out=self) return self def __floordiv__(self, right_object): ro", "Returns ------- 
NumPy array \"\"\" if units is None: v", "_unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview *", "iterable. return YTArray(input_object) return input_object else: return input_object def sanitize_units_mul(this_object,", ": data-type The dtype of the array data. Defaults to", ">>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.quan(5, 'code_length') >>>", "iscomplex, isfinite, isinf, isnan, signbit, copysign, nextafter, \\ modf, ldexp,", "units. See the documentation of numpy.cross for full details. \"\"\"", "ret_class = type(self) elif ufunc in binary_operators: unit_operator = self._ufunc_registry[context[0]]", "group_name=None): r\"\"\"Attempts read in and convert a dataset in an", "trunc, fabs, spacing try: # numpy 1.13 or newer from", "def sanitize_units_add(this_object, other_object, op_string): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object)", "on the right of the `+` operator. Must check for", "self).__gt__(oth) # # End comparison operators # # # Begin", "in arrays: if hasattr(array, \"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\") if header", "Parameters ---------- filename: string The filename to of the hdf5", "datasets at the top level by default. \"\"\" import h5py", "to load an old pickle file # created before we", "out), units @return_arr def mean(self, axis=None, dtype=None, out=None): return super(YTArray,", "converted to mks units. \"\"\" return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit,", "\"\"\" np_ret = super(YTArray, self).__reduce__() obj_state = np_ret[2] unit_state =", "Parameters ---------- arr : AstroPy Quantity The Quantity to convert", "\"\"\" See __sub__. 
\"\"\" lo = sanitize_units_add(self, left_object, \"subtraction\") return", "sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, invert, logical_not,", "super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self, left_object): \"\"\" See __div__. \"\"\" lo", "[] for array in arrays: if hasattr(array, \"units\"): units.append(str(array.units)) else:", "frexp, fabs, spacing, positive, isnat, ) binary_operators = ( add,", "NULL_UNIT) def __pos__(self): \"\"\" Posify the data. \"\"\" # this", "oth, out=self) return self def __div__(self, right_object): \"\"\" Divide this", "ret_class): if units[0] != units[1]: any_nonzero = [np.any(inps[0]), np.any(inps[1])] if", "---------- fname : str Filename to read. dtype : data-type,", "not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) else: if raise_error: raise YTUfuncUnitError(ufunc,", "input_scalar, input_units=None, registry=None, dtype=np.float64, bypass_validation=False): if not isinstance(input_scalar, (numeric_type, np.number,", "log, log2, log10, expm1, log1p, sqrt, square, reciprocal, sin, cos,", "\"\"\" Convert an AstroPy \"Quantity\" to a YTArray or YTQuantity.", "sqrt: sqrt_unit, square: square_unit, reciprocal: reciprocal_unit, sin: return_without_unit, cos: return_without_unit,", "m NumPy ufuncs will pass through units where appropriate. >>>", "a YTArray subclass. \" \"Received operand types (%s) and (%s)\"", "= str(state[0]), default_unit_symbol_lut.copy() # need to fix up the lut", "\"\"\" Multiply this YTArray by the object on the right", "units. All input arrays must have the same units. See", "u = arr.unit ap_units = [] for base, exponent in", "v = validate_numpy_wrapper_units(v, arrs) return v def ucross(arr1, arr2, registry=None,", "none is found, uses np.float64 bypass_validation : boolean If True,", "Returns ------- Quantity object with data converted to cgs units.", "object. 
if self.units.is_dimensionless and power == -1: ret = super(YTArray,", "a = sp[\"density\"] >>> b = sp[\"temperature\"] >>> c =", "self.units) @return_arr def dot(self, b, out=None): return super(YTArray, self).dot(b), self.units*b.units", "YTArray) for a in arrs): raise RuntimeError(\"Not all of your", "out=None): return super(YTArray, self).mean(axis, dtype, out), self.units @return_arr def sum(self,", "= open(fname, 'r') next_one = False units = [] num_cols", "exp, exp2, log, log2, log10, expm1, log1p, sqrt, square, \\", "preserves units This is a wrapper around np.linalg.norm that preserves", "= [] for unit, pow in powers_dict.items(): # we have", "units = finfo.units if isinstance(x, YTArray): arr = copy.deepcopy(x) arr.convert_to_units(units)", "default one will be used. \"\"\" # Converting from AstroPy", "input_array is NotImplemented: return input_array.view(cls) if registry is None and", "f = open(fname, 'r') next_one = False units = []", "associated with this YTArray or YTQuantity. \"\"\" self.units.list_equivalencies() def has_equivalent(self,", ": String unit specification, unit symbol object, or astropy units", "as expected by e.g. ``yt.loadtxt``. Examples -------- >>> sp =", "cgs are used. Examples -------- >>> E = YTQuantity(2.5, \"erg/s\")", "be used in the conversion. If not specified, the default", "divide_units(unit1, unit2): return unit1/unit2 @lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit): return unit**-1", "**kwargs) if ufunc in (multiply, divide) and method == 'reduce':", "YTQuantity. Parameters ---------- arr : Pint Quantity The Quantity to", "super(YTArray, self).__and__(right_object) def __rand__(self, left_object): return super(YTArray, self).__rand__(left_object) def __iand__(self,", "TypeError( \"Bit-twiddling operators are not defined for YTArray instances\") def", "will extract the 2nd, 5th and 6th columns. 
The default,", "YTArray is tightly integrated with yt datasets: >>> import yt", "def convert_to_cgs(self): \"\"\" Convert the array and units to the", "len(inputs) == 1: _, inp, u = get_inp_u_unary(ufunc, inputs) out_arr", "def __div__(self, right_object): \"\"\" Divide this YTArray by the object", "unit object. Defaults to False. Examples -------- >>> from yt", "you want to get a new quantity in. equivalence :", "super(YTArray, self).__rmul__(lo) def __imul__(self, other): \"\"\" See __mul__. \"\"\" oth", "dtype, out, ddof), self.units def __array_wrap__(self, out_arr, context=None): ret =", "registry and this is specified, this will be used instead", "this is equal to the object on the right. \"\"\"", "the beginning of the file, before the unit header. footer", "YTQuantity. Parameters ---------- arr : AstroPy Quantity The Quantity to", "is a YTArray. if other is None: return True oth", "zero if not np.any(other_object): return ret.view(np.ndarray) elif not np.any(this_object): return", "1, divide: -1} # redefine this here to avoid a", "g = f if dataset_name in g.keys(): d = g[dataset_name]", "the same unit information. \"\"\" if _astropy.units is None: raise", "Unit() handle units arg if it's not already a Unit", "and ufunc is not power: unit2 = Unit(registry=getattr(unit1, 'registry', None))", "unit_operator in (comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps, units,", "__mul__(self, right_object): \"\"\" Multiply this YTArray by the object on", "with zero if not np.any(other_object): return ret.view(np.ndarray) elif not np.any(this_object):", "in the file COPYING.txt, distributed with this software. 
#----------------------------------------------------------------------------- import", "str or sequence of strs, optional A single format (%10.5f),", "def uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find the sorted unique elements of", "= {} info['units'] = str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if dataset_name", "handle_comparison_units( inps, units, ufunc, ret_class) elif unit_operator is preserve_units: inps,", "arr, unit in zip(arrays, units)]) def savetxt(fname, arrays, fmt='%.18e', delimiter='\\t',", "is not None: g = f[group_name] else: g = f", "floor, ceil, trunc, modf, frexp, fabs, spacing, positive, isnat, )", "assume_unique=False): \"\"\"Find the sorted unique elements of the two input", "cls2 if cls2 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls2, (numeric_type,", "not_equal, equal, logical_and, logical_or, logical_xor, maximum, minimum, fmax, fmin, copysign,", "equivalence=equivalence, **kwargs).value if isinstance(self, YTQuantity): return float(v) else: return v", ">>> b = ureg.Quantity(a, \"erg/cm**3\") >>> c = yt.YTArray.from_pint(b) \"\"\"", "B) YTArray([ 1., 2., 3., 4.]) cm \"\"\" v =", "validation logic adds significant overhead. If set, input_units *must* be", "deg2rad, rad2deg, \\ bitwise_and, bitwise_or, bitwise_xor, invert, left_shift, right_shift, \\", "or YTQuantity. Parameters ---------- arr : AstroPy Quantity The Quantity", "See the documentation of numpy.concatenate for full details. 
Examples --------", "units, out_arr, out_arr) else: raise RuntimeError( \"Support for the %s", "new_array) return new_array else: return self.to_equivalent(units, equivalence, **kwargs) def to(self,", "YTInvalidUnitEquivalence(equiv, self.units, unit) else: return new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv, self.units,", "other, 'less_than') return super(YTArray, self).__lt__(oth) def __le__(self, other): \"\"\"Test if", "== b) True \"\"\" _ufunc_registry = { add: preserve_units, subtract:", "is ignored if the version of numpy installed is older", ">>> a = ds.arr(np.ones(5), 'code_length') >>> a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24,", "v.units = a1.units return v def uconcatenate(arrs, axis=0): \"\"\"Concatenate a", "op2): \"\"\"Matrix or vector dot product that preserves units This", "this needs to be defined for all numpy versions, see", "base units. Parameters ---------- unit_system : string, optional The unit", "arguments are passed to the equivalency, which should be used", "E = YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def", "methods # def write_hdf5(self, filename, dataset_name=None, info=None, group_name=None): r\"\"\"Writes a", "out_orig = kwargs.pop('out') out = np.asarray(out_orig[0]) else: out = None", "NULL_UNIT, ) if any([ff != getattr(_, 'units', NULL_UNIT) for _", "vector dot product that preserves units This is a wrapper", "1: raise RuntimeError(\"YTQuantity instances must be scalars\") return ret def", "Begin reduction operators # @return_arr def prod(self, axis=None, dtype=None, out=None):", "YTArray or YTQuantity. 
Parameters ---------- arr : AstroPy Quantity The", "YTArray by the object on the right of the `*`", "arrays, fmt='%.18e', delimiter='\\t', header='', footer='', comments='#'): r\"\"\" Write YTArrays with", "or # array filled with zero if not np.any(other_object): return", "quantity with the same unit information. \"\"\" if _astropy.units is", "positive, isnat, ) binary_operators = ( add, subtract, multiply, divide,", "unit_array(self): \"\"\"Get a YTArray filled with ones with the same", "__deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation This is necessary for stdlib deepcopy", "hour as \"h\" if unit_str == \"h\": unit_str = \"hr\"", "YTUfuncUnitError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units", "data.\"\"\" return self.ndarray_view() d = ndview @property def unit_quantity(self): \"\"\"Get", "E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\" Convert the array", "and np.any(ret): raise YTUnitOperationError(op_string, inp.units, dimensionless) return ret def validate_comparison_units(this,", "filled with ones with the same unit and shape as", "(same dimension) units. \"\"\" ro = sanitize_units_add(self, right_object, \"addition\") return", "arctan2_unit): inps, units = handle_comparison_units( inps, units, ufunc, ret_class, raise_error=True)", "(): return YTQuantity(norm, data.units) return YTArray(norm, data.units) def udot(op1, op2):", "divmod_): out_arr = tuple((ret_class(o, unit) for o in out_arr)) elif", "data in the supplied units, and returns it. Optionally, an", "np.linalg.norm that preserves units. 
See the documentation for that function", "self.units = new_units values = self.d values *= conversion_factor if", "don't do this, super(YTArray, self).__pow__ returns a YTArray # with", "units = op1.units*op2.units if dot.shape == (): return YTQuantity(dot, units)", "np.number, np.ndarray)): raise RuntimeError(\"YTQuantity values must be numeric\") ret =", "tan, ) class YTArray(np.ndarray): \"\"\" An ndarray subclass that attaches", "# dimensions. if isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret", "other, 'greater than') return super(YTArray, self).__gt__(oth) # # End comparison", "requires them. Parameters ---------- units : Unit object or string,", "units to the given units. Parameters ---------- units : Unit", "header='My sphere stuff', delimiter=\"\\t\") \"\"\" if not isinstance(arrays, list): arrays", "to the object on the right. \"\"\" oth = validate_comparison_units(self,", "else: return YTQuantity(arr.magnitude, p_units, registry=unit_registry) def to_pint(self, unit_registry=None): \"\"\" Convert", "copy=False), unit) else: # numpy version equal to or newer", "comments='#'): r\"\"\" Load YTArrays with unit information from a text", "del f[dataset_name] d = g.create_dataset(dataset_name, data=self) else: d = g.create_dataset(dataset_name,", "pow in powers_dict.items(): # we have to do this because", "copy of the array data as a numpy ndarray\"\"\" return", "so we don't mix units with the same # dimensions.", "= self.units return ret # # Start operation methods #", "return super(YTArray, self).__pow__(power) def __abs__(self): \"\"\" Return a YTArray with", "class YTQuantity(YTArray): \"\"\" A scalar associated with a unit. 
Parameters", "modf: passthrough_unit, ldexp: bitop_units, frexp: return_without_unit, floor: passthrough_unit, ceil: passthrough_unit,", "The units you want to get a new quantity in.", "u**(power_sign*inp.shape[kwargs['axis']]) else: unit = u**(power_sign*inp.size) else: unit = self._ufunc_registry[ufunc](u) ret_class", "want to get the bare quantity in. If not specified,", "cm^3). registry : ~yt.units.unit_registry.UnitRegistry The registry to create units from.", "pickle module: http://docs.python.org/2/library/pickle.html Unit metadata is encoded in the zeroth", "the array and units to the equivalent base units in", "uvstack(arrs): \"\"\"Stack arrays in sequence vertically (row wise) while preserving", "YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr, copy=False), unit) else: # numpy version", "= words[1:] if len(words) == 2 and words[1] == \"Units\":", "u.powers): unit_str = base.to_string() # we have to do this", "self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]() oneway_or_equivalent = ( conv_unit.has_equivalent(equiv) or this_equiv._one_way)", "is power: unit2 = inp2 if isinstance(unit2, np.ndarray): if isinstance(unit2,", "comparison operators # # # Begin reduction operators # @return_arr", "if norm.shape == (): return YTQuantity(norm, data.units) return YTArray(norm, data.units)", "String that will be written at the end of the", "some power. Parameters ---------- power : float or dimensionless YTArray.", "= np.intersect1d(arr1, arr2, assume_unique=assume_unique) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return", "special case of adding or subtracting with zero or #", "next_one: units = words[1:] if len(words) == 2 and words[1]", "arr._units.items(): bs = convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs, Rational(exponent))) p_units =", "referring to a dataset. 
\\n\" \"Perhaps you meant to do", "this.units.expr is other.units.expr: if this.units.base_value == other.units.base_value: return other if", ">>> np.all(a == b) True \"\"\" _ufunc_registry = { add:", "Quantity u = arr.unit ap_units = [] for base, exponent", "with ones with the same unit and shape as this", "you want to get the bare quantity in. If not", "if _astropy.units is None: raise ImportError(\"You don't have AstroPy installed,", "the unit data from the metadata extracted in __reduce__ and", "YTArray, so it can't be None. return False oth =", "object on the right. \"\"\" # converts if possible oth", "array with the data in the supplied units, and returns", "information stripped \"\"\" return np.array(self) @classmethod def from_astropy(cls, arr, unit_registry=None):", "than numpy 1.10.0. \"\"\" if LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm =", "ldexp, frexp, fmod, floor, ceil, trunc, fabs, spacing try: #", "if issubclass(cls2, cls1): return cls2 else: raise RuntimeError(\"Undefined operation for", "cls1 is cls2: return cls1 if cls1 in (np.ndarray, np.matrix,", "\"\"\"Get a view of the array data.\"\"\" return self.ndarray_view() d", "str(self) def validate_numpy_wrapper_units(v, arrs): if not any(isinstance(a, YTArray) for a", "Unit object, or string of known unit symbol, and check", "\"\"\" # Check that the other is a YTArray. if", "raise TypeError( \"Bit-twiddling operators are not defined for YTArray instances\")", "return self def __neg__(self): \"\"\" Negate the data. \"\"\" return", "\\ default_unit_symbol_lut from yt.units.equivalencies import equivalence_registry from yt.utilities.logger import ytLogger", "elif isinstance(input_units, Unit): ret.units = input_units else: ret.units = Unit(input_units,", "encoded in the zeroth element of third element of the", "All input arrays must have the same units. 
See the", "YTQuantity(1, 'cm') >>> b = YTQuantity(2, 'm') >>> a +", "to YTArray.\" % (str(ufunc), len(inputs))) if unit is None: out_arr", "logical_not, isreal, iscomplex, isfinite, isinf, isnan, signbit, floor, ceil, trunc,", "3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm This is equivalent to:", "new_units) if offset: np.subtract(new_array, offset*new_array.uq, new_array) return new_array else: return", "of line: \\\"%s\\\".\" % line[0]) f.close() if len(units) != num_cols:", "a = YTQuantity(4.0, \"cm**2/s\") >>> b = a.to_pint() \"\"\" from", "equivalent cgs units. \"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\" Convert", "return_without_unit, log2: return_without_unit, log10: return_without_unit, expm1: return_without_unit, log1p: return_without_unit, sqrt:", "return other @lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units, other_units): \"\"\" Takes a", "This wrapper around numpy.cross preserves units. See the documentation of", "attempt to infer units as well. group_name: string An optional", "__rfloordiv__(self, left_object): \"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self, left_object)", "specified, the default base units of cgs are used. Examples", "-------- >>> temp, velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\" f", "log10, expm1, log1p, sqrt, square, \\ reciprocal, sin, cos, tan,", "equal to or newer than 1.13 def __array_ufunc__(self, ufunc, method,", "sp[\"temperature\"] >>> c = sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere", "to deal with them. >>> np.log10(a) array([ -inf, 0. ,", "the documentation of numpy.intersect1d for full details. 
Examples -------- >>>", "arctan: return_without_unit, arctan2: arctan2_unit, arcsinh: return_without_unit, arccosh: return_without_unit, arctanh: return_without_unit,", "( add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide, power, remainder,", "memodict = {} ret = super(YTArray, self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units))", "operation methods # if LooseVersion(np.__version__) < LooseVersion('1.13.0'): def __add__(self, right_object):", "(multiply_units, divide_units): out, out_arr, unit = handle_multiply_divide_units( unit, units, out,", "data-type, optional Data-type of the resulting array; default: float. delimiter", "def value(self): \"\"\"Get a copy of the array data as", "or, if none is found, uses np.float64 bypass_validation : boolean", "\"\"\" Convert the array and units to the equivalent base", "def get_inp_u_binary(ufunc, inputs): inp1 = coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1]) unit1", "documentation of numpy.cross for full details. \"\"\" v = np.cross(arr1,", "YTArray or YTQuantity The unitful quantity to convert from. 
unit_registry", "\"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\" Convert the array and", "Make sure the other object is a YTArray before we", "validate_comparison_units(self, other, 'less_than or equal') return super(YTArray, self).__le__(oth) def __eq__(self,", "= ( add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide, power,", "in_units(self, units, equivalence=None, **kwargs): \"\"\" Creates a copy of this", "sanitize_units_mul(self, left_object) return super(YTArray, self).__rmul__(lo) def __imul__(self, other): \"\"\" See", "YTQuantity(ret, units) else: # This could be a subclass, so", "don't have AstroPy installed, so you can't convert to \"", "finfo.output_units else: units = finfo.units if isinstance(x, YTArray): arr =", "handle_multiply_divide_units( unit, units, out_arr, out_arr) else: raise RuntimeError( \"Support for", "unit1 = getattr(inp1, 'units', None) unit2 = getattr(inp2, 'units', None)", "zeroth element of third element of the returned tuple, itself", "String that will be written at the beginning of the", "set to the sympy expression 1/1 rather than # a", "units[0]) else: if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) inps =", "return YTQuantity(np.array(out_arr), unit) else: if ret_class is YTQuantity: # This", "units[1]) elif any_nonzero[1] == np.bool_(False): units = (units[0], units[0]) elif", "g[dataset_name] data = dataset[:] units = dataset.attrs.get('units', '') if 'unit_registry'", "The array(s) to write to the file. fmt : str", "that attaches a symbolic unit object to the array data.", "and has the same dimensions as the object # under", "a YTArray filled with ones with the same unit and", "unit) else: # numpy version equal to or newer than", "a yt UnitRegistry object. 
Examples -------- >>> a = YTQuantity(4.0,", "\"addition\") return super(YTArray, self).__add__(ro) def __radd__(self, left_object): \"\"\" See __add__.", ">>> E = YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system))", "\"\"\" f = open(fname, 'r') next_one = False units =", "the right. \"\"\" # Check that the other is a", "preserve_units, } __array_priority__ = 2.0 def __new__(cls, input_array, input_units=None, registry=None,", "self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\" Convert the array and units to", "from yt.utilities.exceptions import \\ YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError, \\", "yt.YTArray.from_pint(b) \"\"\" p_units = [] for base, exponent in arr._units.items():", "if group_name is not None: g = f[group_name] else: g", ">>> b + a YTArray([ 4.01, 5.02, 6.03]) m NumPy", "\"\"\" return super(YTArray, self).__abs__() # # Start comparison operators. #", "file into a YTArray. Parameters ---------- filename: string The filename", "dataset_name in g.keys(): d = g[dataset_name] # Overwrite without deleting", "len(lut['m']) == 2: lut.update(default_unit_symbol_lut) for k, v in [(k, v)", "3.08600000e+24]) cm This is equivalent to: >>> b = YTArray(np.ones(5),", "else: return data.ds.quan(x, units) def get_binary_op_return_class(cls1, cls2): if cls1 is", "if ret.shape == (): return ret[()] else: return ret ufunc", "if memodict is None: memodict = {} ret = super(YTArray,", ">>> print(np.log10(a)) 1.07918124605 YTQuantity is tightly integrated with yt datasets:", "this unitful quantity, try the :meth:`list_equivalencies` method. 
Examples -------- >>>", "return_without_unit, signbit: return_without_unit, copysign: passthrough_unit, nextafter: preserve_units, modf: passthrough_unit, ldexp:", "equal') return super(YTArray, self).__le__(oth) def __eq__(self, other): \"\"\" Test if", "powers_dict.items(): # we have to do this because Pint doesn't", "\"\"\" def __new__(cls, input_scalar, input_units=None, registry=None, dtype=np.float64, bypass_validation=False): if not", "Parameters ---------- units : Unit object or string The units", "return unit_registry.Quantity(self.value, units) # # End unit conversion methods #", "methods # @property def value(self): \"\"\"Get a copy of the", "of this array with the data in the specified unit", "else: raise RuntimeError( \"Support for the %s ufunc has not", "less than or equal to the object on the right.", "return v def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None):", "string used to separate values. By default, this is any", "add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide, \\ floor_divide, negative,", "see # numpy issue #9081 return type(self)(super(YTArray, self).__pos__(), self.units) @return_arr", "other if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units) return other.in_units(this.units)", "out=self) return self def __floordiv__(self, right_object): ro = sanitize_units_mul(self, right_object)", "passthrough_unit, trunc: passthrough_unit, spacing: passthrough_unit, positive: passthrough_unit, divmod_: passthrough_unit, isnat:", "the array data. Examples -------- >>> from yt import YTQuantity", "ones with the same unit and shape as this array\"\"\"", "try: unit, lut = state[0] except TypeError: # this case", "the union of two arrays. A wrapper around numpy.intersect1d that", "= (units[0], units[0]) else: if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units)", "default: float. 
delimiter : str, optional The string used to", "one is not supplied, the default one will be used.", "with zeros if not inp.units.is_dimensionless and np.any(ret): raise YTUnitOperationError(op_string, inp.units,", "the documentation of numpy.concatenate for full details. Examples -------- >>>", "elif not any([u1d, u2d]): if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units)", "dtype, out), self.units @return_arr def std(self, axis=None, dtype=None, out=None, ddof=0):", "while preserving units This is a wrapper around np.vstack that", "dtype=dtype, comments=comments, delimiter=delimiter, converters=None, unpack=True, usecols=usecols, ndmin=0) if usecols is", "other_units = Unit(other_units, registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions, None) if equiv_dims", "was saved prior to PR #1728 # when the pickle", "= input_units else: ret.units = Unit(input_units, registry=registry) return ret elif", "the index of the new axis in the dimensions of", "f dataset = g[dataset_name] data = dataset[:] units = dataset.attrs.get('units',", "copy of this array with the data in the equivalent", "ap_units, registry=unit_registry) def to_astropy(self, **kwargs): \"\"\" Creates a new AstroPy", "2, 3], 'cm') >>> b = YTArray([4, 5, 6], 'm')", "data. \"\"\" return self.view(np.ndarray) def to_ndarray(self): \"\"\" Creates a copy", "typed=False) def sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128, typed=False) def multiply_units(unit1, unit2):", "\"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\" Convert the array and", "units[0]) elif not any([u1d, u2d]): if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc,", "units. \"\"\" v = np.vstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return", "def __rmul__(self, left_object): \"\"\" See __mul__. 
\"\"\" lo = sanitize_units_mul(self,", "is preserve_units: inps, units = handle_preserve_units( inps, units, ufunc, ret_class)", "if out_arr is not None: out_arr = ufunc(inp).view(np.ndarray) return out_arr,", "None def arctan2_unit(unit1, unit2): return NULL_UNIT def comparison_unit(unit1, unit2=None): return", "and words[1] == \"Units\": next_one = True else: # Here", "@wraps(func) def wrapped(*args, **kwargs): ret, units = func(*args, **kwargs) if", "units def handle_multiply_divide_units(unit, units, out, out_arr): if unit.is_dimensionless and unit.base_value", "return np.ones_like(self) ua = unit_array def __getitem__(self, item): ret =", "operators # @return_arr def prod(self, axis=None, dtype=None, out=None): if axis", "self).__reduce__() obj_state = np_ret[2] unit_state = (((str(self.units), self.units.registry.lut),) + obj_state[:],)", "isinstance(unit2, np.ndarray): if isinstance(unit2, YTArray): if unit2.units.is_dimensionless: pass else: raise", "col in usecols] mylog.info(\"Array units: %s\" % \", \".join(units)) return", "= {multiply: 1, divide: -1} # redefine this here to", "# def __lt__(self, other): \"\"\" Test if this is less", "= yt.YTArray([1, 2, 3], 'cm') >>> B = yt.YTArray([2, 3,", "', as expected by e.g. ``yt.loadtxt``. Examples -------- >>> sp", "the object on the right of the `+` operator. 
Must", "passthrough_unit, exp: return_without_unit, exp2: return_without_unit, log: return_without_unit, log2: return_without_unit, log10:", "dtype of the input data, or, if none is found,", "other_units.dimensions: if current_mks in equiv_dims.free_symbols: base = \"SI\" else: base", "to an equivalent quantity which is not in the same", "the %s ufunc has not been added \" \"to YTArray.\"", "self._ufunc_registry[ufunc] inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in", "isfinite, isinf, isnan, signbit, copysign, nextafter, \\ modf, ldexp, frexp,", "o in out_arr)) elif out_arr.size == 1: out_arr = YTQuantity(np.asarray(out_arr),", "== -1: ret = super(YTArray, self).__pow__(power) return type(self)(ret, input_units='') return", "YTArray) for o in input_object]): ff = getattr(input_object[0], 'units', NULL_UNIT,", "less_equal, not_equal, equal, logical_and, logical_or, logical_xor, maximum, minimum, fmax, fmin,", "fabs, spacing try: # numpy 1.13 or newer from numpy", "v in [(k, v) for k, v in lut.items() if", "The unitful quantity to convert from. unit_registry : Pint UnitRegistry,", "new AstroPy quantity with the same unit information. \"\"\" if", "from_hdf5(cls, filename, dataset_name=None, group_name=None): r\"\"\"Attempts read in and convert a", "will be \" \"dimensionless!\") units = [\"dimensionless\"]*num_cols arrays = np.loadtxt(fname,", "raise YTUnitOperationError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps,", "pass through units where appropriate. 
>>> import numpy as np", "maximum, minimum, fmax, fmin, copysign, nextafter, ldexp, fmod, divmod_, heaviside", "import h5py from yt.extern.six.moves import cPickle as pickle if dataset_name", "self._ufunc_registry[ufunc](u) ret_class = type(self) elif len(inputs) == 2: unit_operator =", "left_object, \"subtraction\") return super(YTArray, self).__rsub__(lo) def __isub__(self, other): \"\"\" See", "else: finfo = data.ds._get_field_info(field) if finfo.sampling_type == 'particle': units =", "dtype, out), units @return_arr def mean(self, axis=None, dtype=None, out=None): return", "from yt import YTQuantity >>> a = YTQuantity(1, 'cm') >>>", "ret_class(inps[0]).units)) return inps, units def handle_multiply_divide_units(unit, units, out, out_arr): if", "raise RuntimeError( \"Support for the %s ufunc with %i inputs", "+ obj_state[:],) new_ret = np_ret[:2] + unit_state + np_ret[3:] return", "False return True def return_arr(func): @wraps(func) def wrapped(*args, **kwargs): ret,", "binary_operators: unit_operator = self._ufunc_registry[context[0]] inps, units, ret_class = get_inp_u_binary(ufunc, inputs)", "is angle and ufunc in trigonometric_operators: inp = inp.in_units('radian').v if", "# If the other object is not a YTArray, then", "it will be the first dimension and if ``axis=-1`` it", "because Pint doesn't recognize # \"yr\" as \"year\" if str(unit).endswith(\"yr\")", "power, remainder, mod, arctan2, hypot, bitwise_and, bitwise_or, bitwise_xor, left_shift, right_shift,", "will pass through units where appropriate. 
>>> import numpy as", "string, optional The units you want to get the bare", "value of 1.0\"\"\" return YTQuantity(1.0, self.units) uq = unit_quantity @property", "= base.to_string() # we have to do this because AstroPy", "divmod_, heaviside ) trigonometric_operators = ( sin, cos, tan, )", "== (): return ret[()] else: return ret ufunc = context[0]", "= -1 for line in f.readlines(): words = line.strip().split() if", "is tightly integrated with yt datasets: >>> import yt >>>", "possible oth = validate_comparison_units(self, other, 'less_than') return super(YTArray, self).__lt__(oth) def", "character used to indicate the start of a comment; default:", "equiv, **kwargs): \"\"\" Convert a YTArray or YTQuantity to an", "numpy as np >>> a = YTArray(np.arange(8) - 4, 'g/cm**3')", "a str. units = Unit(input_units, registry=registry) # Attach the units", "the quantity. Powers must be specified using python syntax (cm**3,", "units = (units[0], units[0]) else: if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc,", "Parameters ---------- input_array : :obj:`!iterable` A tuple, list, or array", "axis=axis) v = validate_numpy_wrapper_units(v, arrs) return v def ucross(arr1, arr2,", "for base, exponent in zip(u.bases, u.powers): unit_str = base.to_string() #", "value @property def ndview(self): \"\"\"Get a view of the array", "view into the array, but as an ndarray rather than", "= h5py.File(filename) if group_name is not None: g = f[group_name]", "cls1 if issubclass(cls1, cls2): return cls1 if issubclass(cls2, cls1): return", "= \"*\".join(ap_units) if isinstance(arr.value, np.ndarray): return YTArray(arr.value, ap_units, registry=unit_registry) else:", "method. 
Default: None Returns ------- YTArray \"\"\" if equivalence is", "oth = sanitize_units_add(self, other, \"subtraction\") np.subtract(self, oth, out=self) return self", "is None: if registry is None: ret.units = input_array.units else:", "not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)): raise RuntimeError(\"YTQuantity values must be", "deg2rad: return_without_unit, rad2deg: return_without_unit, bitwise_and: bitop_units, bitwise_or: bitop_units, bitwise_xor: bitop_units,", "out), self.units @return_arr def std(self, axis=None, dtype=None, out=None, ddof=0): return", "copy=False) elif ufunc in (modf, divmod_): out_arr = tuple((ret_class(o, unit)", "left_object): \"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self, left_object) return", "sympy expression 1/1 rather than # a dimensionless Unit object.", "itself a tuple used to restore the state of the", "(units[1], units[1]) elif any_nonzero[1] == np.bool_(False): units = (units[0], units[0])", "\"\"\"Test if this is less than or equal to the", "'cm') >>> uunion1d(A, B) YTArray([ 1., 2., 3., 4.]) cm", "return YTQuantity(ret, units) else: # This could be a subclass,", "super(YTArray, self).__truediv__(ro) def __rtruediv__(self, left_object): \"\"\" See __div__. \"\"\" lo", "== \"h\": unit_str = \"hr\" ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent))) ap_units", "\"*\".join(ap_units) if isinstance(arr.value, np.ndarray): return YTArray(arr.value, ap_units, registry=unit_registry) else: return", "square_unit, reciprocal: reciprocal_unit, sin: return_without_unit, cos: return_without_unit, tan: return_without_unit, sinh:", "axis=None, dtype=None, out=None, ddof=0): return super(YTArray, self).std(axis, dtype, out, ddof),", "if the pickle was saved prior to PR #1728 #", "isnan, signbit, copysign, nextafter, \\ modf, ldexp, frexp, fmod, floor,", "if len(units) != num_cols: mylog.warning(\"Malformed or incomplete units header. 
Arrays", "= np.asarray(input_array, dtype=dtype).view(cls) obj.units = input_units if registry is not", "numbers import Number as numeric_type from yt.utilities.on_demand_imports import _astropy from", "return self def __pow__(self, power): \"\"\" Raise this YTArray to", "the bare quantity in. If not specified, the value will", "\"\"\" oth = sanitize_units_add(self, other, \"addition\") np.add(self, oth, out=self) return", "out = None if len(inputs) == 1: _, inp, u", "array; default: float. delimiter : str, optional The string used", "np.ndarray): return data.ds.arr(x, units) else: return data.ds.quan(x, units) def get_binary_op_return_class(cls1,", "pickle if dataset_name is None: dataset_name = 'array_data' f =", "info=myinfo) \"\"\" from yt.utilities.on_demand_imports import _h5py as h5py from yt.extern.six.moves", "YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache import", "that preserves units This is a wrapper around np.linalg.norm that", "inp = inputs[0] u = getattr(inp, 'units', None) if u", "\" + '\\t'.join(units) np.savetxt(fname, np.transpose(arrays), header=header, fmt=fmt, delimiter=delimiter, footer=footer, newline='\\n',", "array with the data in the specified unit system, and", "else: d = g.create_dataset(dataset_name, data=self) for k, v in info.items():", "lo = sanitize_units_add(self, left_object, \"addition\") return super(YTArray, self).__radd__(lo) def __iadd__(self,", "not None: units = [units[col] for col in usecols] mylog.info(\"Array", "!= num_cols: mylog.warning(\"Malformed or incomplete units header. 
Arrays will be", "validate_comparison_units(self, other, 'less_than') return super(YTArray, self).__lt__(oth) def __le__(self, other): \"\"\"Test", "UnitRegistry(lut=lut, add_default_symbols=False) self.units = Unit(unit, registry=registry) def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy", "return YTQuantity(arr.value, ap_units, registry=unit_registry) def to_astropy(self, **kwargs): \"\"\" Creates a", "# if LooseVersion(np.__version__) < LooseVersion('1.13.0'): def __add__(self, right_object): \"\"\" Add", "unit2 = inp2 if isinstance(unit2, np.ndarray): if isinstance(unit2, YTArray): if", "created before we serialized the unit symbol lookup table #", "first. For example, ``usecols = (1,4,5)`` will extract the 2nd,", "let Unit() handle units arg if it's not already a", "np.vstack that preserves units. \"\"\" v = np.vstack(arrs) v =", "4], 'cm') >>> uintersect1d(A, B) YTArray([ 2., 3.]) cm \"\"\"", "= f[group_name] else: g = f.create_group(group_name) else: g = f", "4], 'cm') >>> uconcatenate((A, B)) YTArray([ 1., 2., 3., 2.,", "cPickle as pickle if info is None: info = {}", "def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): func = getattr(ufunc, method)", "A dictionary of supplementary info to write to append as", "yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.arr(np.ones(5), 'code_length') >>> a.in_cgs() YTArray([ 3.08600000e+24,", "the array and units to the equivalent cgs units. \"\"\"", "isinstance(out_orig[0], YTArray): out_orig[0].units = unit return out_arr def copy(self, order='C'):", "if info is None: info = {} info['units'] = str(self.units)", "is a wrapper around np.linalg.norm that preserves units. See the", "# with a unit attribute set to the sympy expression", "columns being read. comments : str, optional The character used", "YTArrays to. arrays : list of YTArrays or single YTArray", "data. 
Examples -------- >>> from yt import YTQuantity >>> a", "x, field): field = data._determine_fields(field)[0] if isinstance(field, tuple): finfo =", "particular equivalency requires them. Parameters ---------- units : Unit object", "usecols=usecols, ndmin=0) if usecols is not None: units = [units[col]", "if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to(", "\"an AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def from_pint(cls, arr,", "import lru_cache from numbers import Number as numeric_type from yt.utilities.on_demand_imports", "if 'unit_registry' in dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut =", "self.units, unit) def list_equivalencies(self): \"\"\" Lists the possible equivalencies associated", "array data as a numpy ndarray\"\"\" return np.array(self) v =", "supported for this unitful quantity, try the :meth:`list_equivalencies` method. Default:", "if that particular equivalency requires them. Parameters ---------- units :", "Check that the other is a YTArray. oth = validate_comparison_units(self,", "the other is a YTArray. oth = validate_comparison_units( self, other,", "for full details. Examples -------- >>> A = yt.YTArray([1, 2,", "inps, units, ufunc, ret_class) elif unit_operator is preserve_units: inps, units", "other, 'equal') return super(YTArray, self).__eq__(oth) def __ne__(self, other): \"\"\" Test", "kwargs['axis'] is not None: unit = u**(power_sign*inp.shape[kwargs['axis']]) else: unit =", "= 'array_data' f = h5py.File(filename) if group_name is not None:", "the same as a yt UnitRegistry object. 
Examples -------- >>>", "with yt datasets: >>> import yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030')", "in arr._units.items(): bs = convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs, Rational(exponent))) p_units", "def divide_units(unit1, unit2): return unit1/unit2 @lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit): return", "for k, v in info.items(): d.attrs[k] = v f.close() @classmethod", "Unit(registry=unit.registry) return out, out_arr, unit def coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray):", "def __repr__(self): \"\"\" \"\"\" return super(YTArray, self).__repr__()+' '+self.units.__repr__() def __str__(self):", "a text file. Parameters ---------- fname : str The file", "= sanitize_units_mul(self, other) np.floor_divide(self, oth, out=self) return self def __or__(self,", "Start unit conversion methods # def convert_to_units(self, units): \"\"\" Convert", "np.number, list, tuple)): return cls1 if issubclass(cls1, YTQuantity): return cls2", "info['units'] = str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is None:", "'units'): ret.units = self.units return ret # # Start operation", "\"\"\"Concatenate a sequence of arrays. This wrapper around numpy.concatenate preserves", "UnitRegistry() >>> a = np.random.random(10) >>> b = ureg.Quantity(a, \"erg/cm**3\")", "write a dataset to dataset_name: string The name of the", "wise) while preserving units This is a wrapper around np.hstack", "\"Units\": next_one = True else: # Here we catch the", "as \"year\" if str(unit).endswith(\"yr\") and len(str(unit)) in [2,3]: unit =", "correct (same dimension) units. 
\"\"\" ro = sanitize_units_add(self, right_object, \"subtraction\")", ">>> a = YTQuantity(4.0, \"cm**2/s\") >>> b = a.to_pint() \"\"\"", "units to input_units : String unit specification, unit symbol object,", "input_units is already associated with a unit registry and this", "class type obj = np.asarray(input_array, dtype=dtype).view(cls) # Check units type", "= u**(power_sign*inp.shape[kwargs['axis']]) else: unit = u**(power_sign*inp.size) else: unit = self._ufunc_registry[ufunc](u)", "return unit1/unit2 @lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit): return unit**-1 def passthrough_unit(unit,", "See __sub__. \"\"\" lo = sanitize_units_add(self, left_object, \"subtraction\") return super(YTArray,", "return arr if isinstance(x, np.ndarray): return data.ds.arr(x, units) else: return", "on the right of the `*` operator. The unit objects", "p_units, registry=unit_registry) else: return YTQuantity(arr.magnitude, p_units, registry=unit_registry) def to_pint(self, unit_registry=None):", "input_units is None: if registry is None: ret.units = input_array.units", "new_ret = np_ret[:2] + unit_state + np_ret[3:] return new_ret def", "super(YTArray, self).__reduce__() obj_state = np_ret[2] unit_state = (((str(self.units), self.units.registry.lut),) +", "np.subtract(self, offset*self.uq, self) return self def convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert", "dtype=None, out=None, ddof=0): return super(YTArray, self).std(axis, dtype, out, ddof), self.units", ">>> a = YTQuantity(12, 'g/cm**3') >>> np.abs(a) 12 g/cm**3 and", "v = self.in_units(units, equivalence=equivalence, **kwargs).value if isinstance(self, YTQuantity): return float(v)", "`-` from this ytarray. Must check for the correct (same", "this array's data. 
\"\"\" return self.view(np.ndarray) def to_ndarray(self): \"\"\" Creates", "**kwargs) if isinstance(new_arr, tuple): try: return type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError:", "(((str(self.units), self.units.registry.lut),) + obj_state[:],) new_ret = np_ret[:2] + unit_state +", "tan, arcsin, arccos, arctan, arctan2, \\ hypot, sinh, cosh, tanh,", "return YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry) # Input array is an", "out=self) return self def __div__(self, right_object): \"\"\" Divide this YTArray", "__new__(cls, input_array, input_units=None, registry=None, dtype=None, bypass_validation=False): if dtype is None:", "minimum, fmax, fmin, copysign, nextafter, ldexp, fmod, divmod_, heaviside )", "0.60205999, 0.69897 , 0.77815125, 0.84509804]) YTArray is tightly integrated with", "lut.items() if len(v) == 2]: lut[k] = v + (0.0,", "fmin, copysign, nextafter, ldexp, fmod, divmod_, heaviside ) trigonometric_operators =", "= ( negative, absolute, rint, sign, conj, exp, exp2, log,", "self.shape and d.dtype == self.dtype: d[...] = self for k", "return super(YTArray, self).prod(axis, dtype, out), units @return_arr def mean(self, axis=None,", "Default: None Returns ------- YTArray \"\"\" if equivalence is None:", "= get_inp_u_binary(ufunc, inputs) if unit_operator in (comparison_unit, arctan2_unit): inps, units", "metadata is encoded in the zeroth element of third element", "registry=unit_registry) def to_astropy(self, **kwargs): \"\"\" Creates a new AstroPy quantity", "unit_registry : Pint UnitRegistry, optional The Pint UnitRegistry to use", "sanitize_units_add(self, right_object, \"subtraction\") return super(YTArray, self).__sub__(ro) def __rsub__(self, left_object): \"\"\"", "file COPYING.txt, distributed with this software. 
#----------------------------------------------------------------------------- import copy import", "wise) while preserving units This is a wrapper around np.vstack", "YTQuantity with # size > 1 return YTArray(np.array(out_arr), unit) return", "= np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is None: dataset_name = 'array_data' f", "arctanh: return_without_unit, hypot: preserve_units, deg2rad: return_without_unit, rad2deg: return_without_unit, bitwise_and: bitop_units,", "info is None: info = {} info['units'] = str(self.units) info['unit_registry']", "scalar The scalar to attach units to input_units : String", "object. Defaults to False. Examples -------- >>> from yt import", "ndarray_view(self): \"\"\" Returns a view into the array, but as", "(0.0, r'\\rm{' + k.replace('_', '\\ ') + '}') registry =", "divide_units, floor_divide: divide_units, negative: passthrough_unit, power: power_unit, remainder: preserve_units, mod:", "out=None): return super(YTArray, self).dot(b), self.units*b.units def __reduce__(self): \"\"\"Pickle reduction method", "super(YTArray, self).__pow__(power) def __abs__(self): \"\"\" Return a YTArray with the", "instance # We first cast to be our class type", "= _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) self.units = new_units", "ytarray. Returns ------- View of this array's data. \"\"\" return", "__reduce__ and then serialized by pickle. \"\"\" super(YTArray, self).__setstate__(state[1:]) try:", "symbol lookup table # into the pickle file unit, lut", "= sanitize_units_add(self, other, \"addition\") np.add(self, oth, out=self) return self def", "not a YTArray, then one of the arrays must be", "'code_length', registry=ds.unit_registry) >>> np.all(a == b) True \"\"\" _ufunc_registry =", "in an hdf5 file into a YTArray. Parameters ---------- filename:", "a subclass, so don't call YTArray directly. 
return type(args[0])(ret, units)", "or astropy units The units of the array. Powers must", "the unit information stripped \"\"\" return np.array(self) @classmethod def from_astropy(cls,", "== 2]: lut[k] = v + (0.0, r'\\rm{' + k.replace('_',", "\\ modf, ldexp, frexp, fmod, floor, ceil, trunc, fabs, spacing", "== 1: return YTQuantity(np.array(out_arr), unit) else: if ret_class is YTQuantity:", "sanitize_units_add(self, left_object, \"addition\") return super(YTArray, self).__radd__(lo) def __iadd__(self, other): \"\"\"", "else: units = finfo.units if isinstance(x, YTArray): arr = copy.deepcopy(x)", "%s ufunc with %i inputs has not been\" \"added to", "5th and 6th columns. The default, None, results in all", "def sum(self, axis=None, dtype=None, out=None): return super(YTArray, self).sum(axis, dtype, out),", "unit2), ret_class def handle_preserve_units(inps, units, ufunc, ret_class): if units[0] !=", "exp: return_without_unit, exp2: return_without_unit, log: return_without_unit, log2: return_without_unit, log10: return_without_unit,", "to write the YTArrays to. arrays : list of YTArrays", "can lead to significant speedups in the input validation logic", "YTArray): out_orig[0].units = unit return out_arr def copy(self, order='C'): return", "An alias for YTArray.in_units(). See the docstrings of that function", "return self def __floordiv__(self, right_object): ro = sanitize_units_mul(self, right_object) return", "= YTQuantity(4.0, \"cm**2/s\") >>> b = a.to_pint() \"\"\" from pint", "unit system. Parameters ---------- unit_system : string, optional The unit", "if isinstance(ret, YTQuantity) and ret.shape != (): ret = ret.view(YTArray)", "units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in (comparison_unit, arctan2_unit):", "dot(self, b, out=None): return super(YTArray, self).dot(b), self.units*b.units def __reduce__(self): \"\"\"Pickle", "is older than numpy 1.10.0. 
\"\"\" if LooseVersion(np.__version__) < LooseVersion('1.10.0'):", "equiv): \"\"\" Check to see if this YTArray or YTQuantity", "sin, cos, tan, ) class YTArray(np.ndarray): \"\"\" An ndarray subclass", "\"\"\" return super(YTArray, self).__neg__() def __mul__(self, right_object): \"\"\" Multiply this", "other.in_units(this.units) return other @lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units, other_units): \"\"\" Takes", "log1p: return_without_unit, sqrt: sqrt_unit, square: square_unit, reciprocal: reciprocal_unit, sin: return_without_unit,", "def __truediv__(self, right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__truediv__(ro)", "def comparison_unit(unit1, unit2=None): return None def invert_units(unit): raise TypeError( \"Bit-twiddling", "ignored if the version of numpy installed is older than", "left_shift, right_shift, greater, greater_equal, less, less_equal, not_equal, equal, logical_and, logical_or,", "offset) = self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview * conversion_factor, new_units) if", "raise RuntimeError(\"Your arrays must have identical units.\") v.units = a1.units", "sqrt_unit, square: square_unit, reciprocal: reciprocal_unit, sin: return_without_unit, cos: return_without_unit, tan:", "other is a YTArray. if hasattr(other, 'units'): if this.units.expr is", "arrays. This wrapper around numpy.concatenate preserves units. All input arrays", "a bare NumPy array. Optionally, an equivalence can be specified", "type(args[0])(ret, units) return wrapped @lru_cache(maxsize=128, typed=False) def sqrt_unit(unit): return unit**0.5", "= self._ufunc_registry[context[0]](u) ret_class = type(self) elif ufunc in binary_operators: unit_operator", "the iterable. return YTArray(input_object) return input_object else: return input_object def", "values. By default, this is any whitespace. 
usecols : sequence,", "as numeric_type from yt.utilities.on_demand_imports import _astropy from sympy import Rational", "= str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is None: dataset_name", "being the first. For example, ``usecols = (1,4,5)`` will extract", ">>> uconcatenate((A, B)) YTArray([ 1., 2., 3., 2., 3., 4.])", "return inps, units def handle_comparison_units(inps, units, ufunc, ret_class, raise_error=False): if", "a YTArray or YTQuantity. Parameters ---------- arr : AstroPy Quantity", "line in f.readlines(): words = line.strip().split() if len(words) == 0:", "= [] num_cols = -1 for line in f.readlines(): words", "dataset.attrs.get('units', '') if 'unit_registry' in dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else:", "the arrays must be # dimensionless or filled with zeros", "def has_equivalent(self, equiv): \"\"\" Check to see if this YTArray", "infer units as well. group_name: string An optional group to", "list, tuple)): return cls1 if issubclass(cls1, YTQuantity): return cls2 if", "sin, cos, tan, arcsin, arccos, arctan, arctan2, \\ hypot, sinh,", "this, super(YTArray, self).__pow__ returns a YTArray # with a unit", "if this.units.base_value == other.units.base_value: return other if not this.units.same_dimensions_as(other.units): raise", "\"\"\"Stack arrays in sequence vertically (row wise) while preserving units", "def loadtxt(fname, dtype='float', delimiter='\\t', usecols=None, comments='#'): r\"\"\" Load YTArrays with", "this array\"\"\" return np.ones_like(self) ua = unit_array def __getitem__(self, item):", "%s\" % \", \".join(units)) return tuple([YTArray(arr, unit) for arr, unit", "a wrapper around np.vstack that preserves units. 
\"\"\" v =", "context) if isinstance(ret, YTQuantity) and ret.shape != (): ret =", "= validate_comparison_units(self, other, 'less_than or equal') return super(YTArray, self).__le__(oth) def", "must have identical units.\") v.units = a1.units return v def", "unit registry and this is specified, this will be used", "inputs) if unit_operator in (comparison_unit, arctan2_unit): inps, units = handle_comparison_units(", "right. \"\"\" # Check that the other is a YTArray.", "isinstance(input_array, np.ndarray): pass elif iterable(input_array) and input_array: if isinstance(input_array[0], YTArray):", "obj def __repr__(self): \"\"\" \"\"\" return super(YTArray, self).__repr__()+' '+self.units.__repr__() def", "greater, greater_equal, less, less_equal, not_equal, equal, logical_and, \\ logical_or, logical_xor,", "getattr(input_array, 'dtype', np.float64) if bypass_validation is True: obj = np.asarray(input_array,", "if dataset_name is None: dataset_name = 'array_data' f = h5py.File(filename)", "iterable(input_object): if any([isinstance(o, YTArray) for o in input_object]): ff =", "state): \"\"\"Pickle setstate method This is called inside pickle.read() and", "= ds.quan(5, 'code_length') >>> a.in_cgs() 1.543e+25 cm This is equivalent", "YTQuantity(12, 'g/cm**3') >>> np.abs(a) 12 g/cm**3 and strip them when", "__getitem__(self, item): ret = super(YTArray, self).__getitem__(item) if ret.shape == ():", "Pint \"Quantity\" to a YTArray or YTQuantity. Parameters ---------- arr", "left_object) return super(YTArray, self).__rmul__(lo) def __imul__(self, other): \"\"\" See __mul__.", "The units of the quantity. Powers must be specified using", "4.]) cm \"\"\" v = np.concatenate(arrs, axis=axis) v = validate_numpy_wrapper_units(v,", "= Unit(input_units, registry=registry) # Attach the units obj.units = units", "lru_cache from numbers import Number as numeric_type from yt.utilities.on_demand_imports import", "equivalent cgs units, and returns it. 
Returns ------- Quantity object", "= validate_numpy_wrapper_units(v, arrs) return v def uhstack(arrs): \"\"\"Stack arrays in", "YTArray. The pow value. \"\"\" if isinstance(power, YTArray): if not", "1.13 def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): func = getattr(ufunc,", "registry=ds.unit_registry) >>> np.all(a == b) True \"\"\" def __new__(cls, input_scalar,", "other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.floor_divide(self,", "= f if dataset_name in g.keys(): d = g[dataset_name] #", "right_object) return super(YTArray, self).__mul__(ro) def __rmul__(self, left_object): \"\"\" See __mul__.", "registry=ds.unit_registry) >>> np.all(a == b) True \"\"\" _ufunc_registry = {", "self).__and__(right_object) def __rand__(self, left_object): return super(YTArray, self).__rand__(left_object) def __iand__(self, other):", "right of the `/` operator. \"\"\" ro = sanitize_units_mul(self, right_object)", "not inp.units.same_dimensions_as(ret.units): # handle special case of adding or subtracting", "self def __xor__(self, right_object): return super(YTArray, self).__xor__(right_object) def __rxor__(self, left_object):", "if equiv_dims == other_units.dimensions: if current_mks in equiv_dims.free_symbols: base =", "\"\"\" # Converting from AstroPy Quantity u = arr.unit ap_units", "__str__(self): \"\"\" \"\"\" return str(self.view(np.ndarray)) + ' ' + str(self.units)", "'units', None) unit2 = getattr(inp2, 'units', None) ret_class = get_binary_op_return_class(type(inp1),", "units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit = Unit(registry=unit.registry) return", "See the docstrings of that function for details. \"\"\" return", "YTQuantity with the same unit as this array and a", "str The units you want to convert to. 
\"\"\" new_units", "floor_divide, negative, power, remainder, mod, absolute, rint, \\ sign, conj,", "None: units = [units[col] for col in usecols] mylog.info(\"Array units:", "equal') return super(YTArray, self).__ge__(oth) def __gt__(self, other): \"\"\" Test if", "copy.deepcopy(x) arr.convert_to_units(units) return arr if isinstance(x, np.ndarray): return data.ds.arr(x, units)", "at beginning of line: \\\"%s\\\".\" % line[0]) f.close() if len(units)", "numpy installed is older than numpy 1.10.0. \"\"\" if LooseVersion(np.__version__)", "isreal, iscomplex, isfinite, isinf, isnan, signbit, floor, ceil, trunc, modf,", "divide, logaddexp, logaddexp2, true_divide, power, remainder, mod, arctan2, hypot, bitwise_and,", "multiply: multiply_units, divide: divide_units, logaddexp: return_without_unit, logaddexp2: return_without_unit, true_divide: divide_units,", "for the %s ufunc has not been added \" \"to", "out=out, **kwargs) if ufunc in (multiply, divide) and method ==", "to indicate the start of a comment; default: '#'. Examples", "dataset_name=None, group_name=None): r\"\"\"Attempts read in and convert a dataset in", "left_shift, right_shift, \\ greater, greater_equal, less, less_equal, not_equal, equal, logical_and,", "bitwise_xor, left_shift, right_shift, greater, greater_equal, less, less_equal, not_equal, equal, logical_and,", "speedups in the input validation logic adds significant overhead. If", "fname : str Filename to read. dtype : data-type, optional", "and returns it. Optionally, an equivalence can be specified to", "ret.units = self.units return ret # # Start operation methods", "value(self): \"\"\"Get a copy of the array data as a", "__lt__(self, other): \"\"\" Test if this is less than the", "---------- filename: string The filename to of the hdf5 file.", "num_cols: mylog.warning(\"Malformed or incomplete units header. 
Arrays will be \"", "instances must be scalars\") return ret def __repr__(self): return str(self)", "a YTArray # with a unit attribute set to the", "= input_units if registry is not None: obj.units.registry = registry", "the %s ufunc with %i inputs has not been\" \"added", "extracted in __reduce__ and then serialized by pickle. \"\"\" super(YTArray,", "a constant factor but not in the same units. Parameters", "Lists the possible equivalencies associated with this YTArray or YTQuantity.", "power: unit2 = inp2 if isinstance(unit2, np.ndarray): if isinstance(unit2, YTArray):", "self.units = Unit(unit, registry=registry) def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation This", "power == -1: ret = super(YTArray, self).__pow__(power) return type(self)(ret, input_units='')", "my_units, my_units.dimensions, other_units, other_units.dimensions) return other_units unary_operators = ( negative,", "keepdims argument is ignored if the version of numpy installed", "(preserve_units, comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps, units, ufunc,", "for a in arrs): raise RuntimeError(\"Not all of your arrays", "Units\\n \" + '\\t'.join(units) np.savetxt(fname, np.transpose(arrays), header=header, fmt=fmt, delimiter=delimiter, footer=footer,", "if issubclass(cls1, YTQuantity): return cls2 if issubclass(cls2, YTQuantity): return cls1", "4.]) cm \"\"\" v = np.union1d(arr1, arr2) v = validate_numpy_wrapper_units(v,", "astropy units The units of the quantity. Powers must be", "self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units, equivalence=None, **kwargs): \"\"\" Creates a copy", "AstroPy Quantity u = arr.unit ap_units = [] for base,", "optional The equivalence you wish to use. 
To see which", "d = g.create_dataset(dataset_name, data=self) for k, v in info.items(): d.attrs[k]", "2, 3], 'cm') >>> B = yt.YTArray([2, 3, 4], 'cm')", "iscomplex, isfinite, isinf, isnan, signbit, floor, ceil, trunc, modf, frexp,", "other_units unary_operators = ( negative, absolute, rint, sign, conj, exp,", "and restores the unit data from the metadata extracted in", "If not specified, the arrays are datasets at the top", "= self._ufunc_registry[ufunc](u) ret_class = type(self) elif len(inputs) == 2: unit_operator", "'units'): if this.units.expr is other.units.expr: if this.units.base_value == other.units.base_value: return", "as comments. Default: '# ', as expected by e.g. ``yt.loadtxt``.", "the right. \"\"\" # converts if possible oth = validate_comparison_units(self,", "True oth = validate_comparison_units(self, other, 'not equal') return super(YTArray, self).__ne__(oth)", "other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.divide(self,", "data.units) return YTArray(norm, data.units) def udot(op1, op2): \"\"\"Matrix or vector", "None: unit_registry = UnitRegistry() powers_dict = self.units.expr.as_powers_dict() units = []", "out_arr out_arr.units = unit if out_arr.size == 1: return YTQuantity(np.array(out_arr),", "if isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude, p_units, registry=unit_registry) else: return YTQuantity(arr.magnitude,", "array with the data in the equivalent cgs units, and", "def prod(self, axis=None, dtype=None, out=None): if axis is not None:", "# Start unit conversion methods # def convert_to_units(self, units): \"\"\"", "arr : YTArray or YTQuantity The unitful quantity to convert", "other is None: # self is a YTArray, so it", "raise RuntimeError(\"Not all of your arrays are YTArrays.\") a1 =", "arrays must be # dimensionless or filled with zeros if", "**kwargs): \"\"\" Creates a new AstroPy quantity with the same", "validate_numpy_wrapper_units(v, arrs) return v def array_like_field(data, x, 
field): field =", "Convert the array and units to the equivalent cgs units.", "See __div__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rtruediv__(lo)", "unit.base_value != 1.0: if not units[0].is_dimensionless: if units[0].dimensions == units[1].dimensions:", "coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1]) unit1 = getattr(inp1, 'units', None) unit2", "one will be used. \"\"\" # Converting from AstroPy Quantity", "'reduce': power_sign = POWER_SIGN_MAPPING[ufunc] if 'axis' in kwargs and kwargs['axis']", "this is specified, this will be used instead of the", "import convert_pint_units NULL_UNIT = Unit() POWER_SIGN_MAPPING = {multiply: 1, divide:", "= YTQuantity(2, 'm') >>> a + b 201.0 cm >>>", "with the unit information stripped \"\"\" return np.array(self) @classmethod def", "overhead. If set, input_units *must* be a valid unit object.", "(conversion_factor, offset) = self.units.get_conversion_factor(new_units) self.units = new_units values = self.d", "this array with the data in the specified unit system,", "def invert_units(unit): raise TypeError( \"Bit-twiddling operators are not defined for", "from. 
If input_units is already associated with a unit registry", "= tuple((ret_class(o, unit) for o in out_arr)) elif out_arr.size ==", "self).mean(axis, dtype, out), self.units @return_arr def sum(self, axis=None, dtype=None, out=None):", "comparison_unit, less_equal: comparison_unit, not_equal: comparison_unit, equal: comparison_unit, logical_and: comparison_unit, logical_or:", "If not specified, the value will be returned in the", "doesn't recognize # \"yr\" as \"year\" if str(unit).endswith(\"yr\") and len(str(unit))", "return cls1 if cls1 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls1,", "unit2) unit2 = 1.0 return (inp1, inp2), (unit1, unit2), ret_class", "minimum, fmax, fmin, \\ isreal, iscomplex, isfinite, isinf, isnan, signbit,", "of the array data as a numpy ndarray\"\"\" return np.array(self)", "to avoid a circular import from yt.funcs def iterable(obj): try:", "\"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__mul__(ro) def __rmul__(self,", "sanitize_units_mul(self, left_object) return super(YTArray, self).__rtruediv__(lo) def __itruediv__(self, other): \"\"\" See", ">>> b = YTQuantity(2, 'm') >>> a + b 201.0", "write the YTArrays to. arrays : list of YTArrays or", "= finfo.units if isinstance(x, YTArray): arr = copy.deepcopy(x) arr.convert_to_units(units) return", "if not np.any(other_object): return ret.view(np.ndarray) elif not np.any(this_object): return ret", "equivalencies associated with this YTArray or YTQuantity. 
\"\"\" self.units.list_equivalencies() def", "\"\"\" if units is None: v = self.value else: v", "ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in (preserve_units, comparison_unit, arctan2_unit):", "copy import numpy as np from distutils.version import LooseVersion from", "def ndview(self): \"\"\"Get a view of the array data.\"\"\" return", "(inp1, inp2), (unit1, unit2), ret_class def handle_preserve_units(inps, units, ufunc, ret_class):", "convert so we don't mix units with the same #", "2., 3., 2., 3., 4.]) cm \"\"\" v = np.concatenate(arrs,", "= self for k in d.attrs.keys(): del d.attrs[k] else: del", "will be prepended to the ``header`` and ``footer`` strings, to", "0.84509804]) YTArray is tightly integrated with yt datasets: >>> import", "= a.to_pint() \"\"\" from pint import UnitRegistry if unit_registry is", "'axis' in kwargs and kwargs['axis'] is not None: unit =", "inp2 = coerce_iterable_units(inputs[1]) unit1 = getattr(inp1, 'units', None) unit2 =", "if unit_str == \"h\": unit_str = \"hr\" ap_units.append(\"%s**(%s)\" % (unit_str,", "self).std(axis, dtype, out, ddof), self.units def __array_wrap__(self, out_arr, context=None): ret", "2., 3.]) cm \"\"\" v = np.intersect1d(arr1, arr2, assume_unique=assume_unique) v", "unit2): raise TypeError( \"Bit-twiddling operators are not defined for YTArray", "related by only a constant factor but not in the", "dtype=dtype, bypass_validation=bypass_validation) if ret.size > 1: raise RuntimeError(\"YTQuantity instances must", ">>> b + a 2.01 m NumPy ufuncs will pass", "return v def uconcatenate(arrs, axis=0): \"\"\"Concatenate a sequence of arrays.", "unit in zip(arrays, units)]) def savetxt(fname, arrays, fmt='%.18e', delimiter='\\t', header='',", "np.ndarray): pass elif iterable(input_array) and input_array: if isinstance(input_array[0], YTArray): return", "type(self) elif len(inputs) == 2: unit_operator = self._ufunc_registry[ufunc] inps, units,", "None: # Nothing provided. Make dimensionless... 
units = Unit() elif", "ret.units = units elif isinstance(input_units, Unit): ret.units = input_units else:", "new quantity in. equivalence : string, optional The equivalence you", "return_without_unit, log10: return_without_unit, expm1: return_without_unit, log1p: return_without_unit, sqrt: sqrt_unit, square:", "ap_units = \"*\".join(ap_units) if isinstance(arr.value, np.ndarray): return YTArray(arr.value, ap_units, registry=unit_registry)", "nextafter: preserve_units, modf: passthrough_unit, ldexp: bitop_units, frexp: return_without_unit, floor: passthrough_unit,", "pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut = None f.close() registry = UnitRegistry(lut=unit_lut, add_default_symbols=False)", "ret_class = type(self) elif len(inputs) == 2: unit_operator = self._ufunc_registry[ufunc]", "None: new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) new_array", "if unit2.units.is_dimensionless: pass else: raise YTUnitOperationError(ufunc, unit1, unit2) unit2 =", "the right of the `-` from this ytarray. Must check", "new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) new_array =", "reciprocal, sin, cos, tan, arcsin, arccos, arctan, arctan2, \\ hypot,", "units, ufunc, ret_class, raise_error=True) unit = unit_operator(*units) if unit_operator in", "unary_operators: out_arr, inp, u = get_inp_u_unary(ufunc, inputs, out_arr) unit =", "cls(data, units, registry=registry) # # Start convenience methods # @property", "= np.random.random(10) >>> b = ureg.Quantity(a, \"erg/cm**3\") >>> c =", "for descriptions of the keyword arguments. 
The keepdims argument is", "remainder, mod, arctan2, hypot, bitwise_and, bitwise_or, bitwise_xor, left_shift, right_shift, greater,", "right_object) return super(YTArray, self).__div__(ro) def __rdiv__(self, left_object): \"\"\" See __div__.", "oth = sanitize_units_add(self, other, \"addition\") np.add(self, oth, out=self) return self", "into the pickle file unit, lut = str(state[0]), default_unit_symbol_lut.copy() #", "else: base = \"CGS\" raise YTEquivalentDimsError(my_units, other_units, base) if not", "object is not a YTArray, then one of the arrays", "option may produce corrupted, invalid units or array data, but", "of third element of the returned tuple, itself a tuple", "elif ufunc in (modf, divmod_): out_arr = tuple((ret_class(o, unit) for", "np.bitwise_and(self, other, out=self) return self def __pow__(self, power): \"\"\" Raise", "out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit = Unit(registry=unit.registry) return out,", "f.close() @classmethod def from_hdf5(cls, filename, dataset_name=None, group_name=None): r\"\"\"Attempts read in", "v def uconcatenate(arrs, axis=0): \"\"\"Concatenate a sequence of arrays. This", "finfo = data.ds._get_field_info(field[0],field[1]) else: finfo = data.ds._get_field_info(field) if finfo.sampling_type ==", "View of this array's data. \"\"\" return self.view(np.ndarray) def to_ndarray(self):", "5, 6], 'm') >>> a + b YTArray([ 401., 502.,", "from. 
unit_registry : Pint UnitRegistry, optional The Pint UnitRegistry to", "[] for unit, pow in powers_dict.items(): # we have to", "1.0 return (inp1, inp2), (unit1, unit2), ret_class def handle_preserve_units(inps, units,", "'r') next_one = False units = [] num_cols = -1", "@lru_cache(maxsize=128, typed=False) def square_unit(unit): return unit*unit @lru_cache(maxsize=128, typed=False) def divide_units(unit1,", "return_without_unit, arctanh: return_without_unit, hypot: preserve_units, deg2rad: return_without_unit, rad2deg: return_without_unit, bitwise_and:", "be used if that particular equivalency requires them. Parameters ----------", "= v + (0.0, r'\\rm{' + k.replace('_', '\\ ') +", "can't be None. return False oth = validate_comparison_units(self, other, 'equal')", "unit conversion methods # def write_hdf5(self, filename, dataset_name=None, info=None, group_name=None):", "self def __neg__(self): \"\"\" Negate the data. \"\"\" return super(YTArray,", "to the dtype of the input data, or, if none", "or YTQuantity to a Pint Quantity. Parameters ---------- arr :", "out, ddof), self.units def __array_wrap__(self, out_arr, context=None): ret = super(YTArray,", "of this array with the unit information stripped \"\"\" return", "header += '\\n' header += \" Units\\n \" + '\\t'.join(units)", "operators # # # Begin reduction operators # @return_arr def", "absolute: passthrough_unit, fabs: passthrough_unit, rint: return_without_unit, sign: return_without_unit, conj: passthrough_unit,", "1/1 rather than # a dimensionless Unit object. 
if self.units.is_dimensionless", "info: dictionary A dictionary of supplementary info to write to", "return v def in_base(self, unit_system=\"cgs\"): \"\"\" Creates a copy of", "LooseVersion(np.__version__) < LooseVersion('1.13.0'): def __add__(self, right_object): \"\"\" Add this ytarray", "return_without_unit, hypot: preserve_units, deg2rad: return_without_unit, rad2deg: return_without_unit, bitwise_and: bitop_units, bitwise_or:", "unit = self._ufunc_registry[ufunc](u) ret_class = type(self) elif len(inputs) == 2:", "logaddexp, logaddexp2, true_divide, \\ floor_divide, negative, power, remainder, mod, absolute,", "sp[\"density\"] >>> b = sp[\"temperature\"] >>> c = sp[\"velocity_x\"] >>>", "by default. Examples -------- >>> a = YTArray([1,2,3], 'cm') >>>", "around np.stack that preserves units. \"\"\" v = np.stack(arrs) v", "{ add: preserve_units, subtract: preserve_units, multiply: multiply_units, divide: divide_units, logaddexp:", "= np.dot(op1.d, op2.d) units = op1.units*op2.units if dot.shape == ():", "units, ufunc, ret_class) elif unit_operator is preserve_units: inps, units =", "unit_operator in (multiply_units, divide_units): out_arr, out_arr, unit = handle_multiply_divide_units( unit,", "YTQuantity(np.array(out_arr), unit) else: if ret_class is YTQuantity: # This happens", "self is a YTArray, so it can't be None. return", "np.array(self) v = value @property def ndview(self): \"\"\"Get a view", "the YTArrays to. arrays : list of YTArrays or single", "= UnitRegistry(lut=lut, add_default_symbols=False) self.units = Unit(unit, registry=registry) def __deepcopy__(self, memodict=None):", "self).__sub__(ro) def __rsub__(self, left_object): \"\"\" See __sub__. \"\"\" lo =", "units type if input_units is None: # Nothing provided. Make", "quantity. Powers must be specified using python syntax (cm**3, not", "udot(op1, op2): \"\"\"Matrix or vector dot product that preserves units", "the data. \"\"\" # this needs to be defined for", "object. 
\"\"\" # let Unit() handle units arg if it's", "------- View of this array's data. \"\"\" return self.view(np.ndarray) def", "separate values. By default, this is any whitespace. usecols :", "return cls1 if issubclass(cls1, YTQuantity): return cls2 if issubclass(cls2, YTQuantity):", "out_arr = YTQuantity(np.asarray(out_arr), unit) else: if ret_class is YTQuantity: #", "__truediv__(self, right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__truediv__(ro) def", "unit in *equiv*. \"\"\" return self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\" Returns", "attach units to input_units : String unit specification, unit symbol", "comment; default: '#'. Examples -------- >>> temp, velx = yt.loadtxt(\"sphere.dat\",", "YTQuantity): return cls2 if issubclass(cls2, YTQuantity): return cls1 if issubclass(cls1,", "\\ greater, greater_equal, less, less_equal, not_equal, equal, logical_and, \\ logical_or,", "registry=registry) def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation This is necessary for", "= np.array(out_arr, copy=False) return out_arr out_arr.units = unit if out_arr.size", "np.number, list, tuple)): return cls2 if cls2 in (np.ndarray, np.matrix,", "Test if this is equal to the object on the", "\"\"\" See __mul__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray,", "ret_class) elif unit_operator is preserve_units: inps, units = handle_preserve_units( inps,", "if finfo.sampling_type == 'particle': units = finfo.output_units else: units =", "deg2rad, rad2deg, invert, logical_not, isreal, iscomplex, isfinite, isinf, isnan, signbit,", "ureg.Quantity(a, \"erg/cm**3\") >>> c = yt.YTArray.from_pint(b) \"\"\" p_units = []", "ufunc has not been added \" \"to YTArray.\" % str(context[0]))", "and d.dtype == self.dtype: d[...] 
= self for k in", "to the sympy expression 1/1 rather than # a dimensionless", "if cls2 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls2, (numeric_type, np.number,", "__sub__(self, right_object): \"\"\" Subtract the object on the right of", "ldexp: bitop_units, frexp: return_without_unit, floor: passthrough_unit, ceil: passthrough_unit, trunc: passthrough_unit,", "std(self, axis=None, dtype=None, out=None, ddof=0): return super(YTArray, self).std(axis, dtype, out,", "conversion_factor if offset: np.subtract(self, offset*self.uq, self) return self def convert_to_base(self,", "ret = input_array.view(cls) if input_units is None: if registry is", "inp1 = coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1]) unit1 = getattr(inp1, 'units',", "Check to see if this YTArray or YTQuantity has an", "square_unit(unit): return unit*unit @lru_cache(maxsize=128, typed=False) def divide_units(unit1, unit2): return unit1/unit2", "= self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview * conversion_factor, new_units) if offset:", "= np.concatenate(arrs, axis=axis) v = validate_numpy_wrapper_units(v, arrs) return v def", "as a yt UnitRegistry object. Examples -------- >>> a =", "and quantities. \"\"\" if memodict is None: memodict = {}", "(modf, divmod_): out_arr = tuple((ret_class(o, unit) for o in out_arr))", "AstroPy quantity with the same unit information. \"\"\" if _astropy.units", "hdf5 file. dataset_name: string The name of the dataset to", "arrays = [arrays] units = [] for array in arrays:", "to see if this YTArray or YTQuantity has an equivalent", "np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims) if norm.shape == (): return YTQuantity(norm,", "units. Parameters ---------- unit_system : string, optional The unit system", "YTArray or YTQuantity. 
\"\"\" self.units.list_equivalencies() def has_equivalent(self, equiv): \"\"\" Check", "cos, tan, arcsin, arccos, arctan, arctan2, \\ hypot, sinh, cosh,", "return_without_unit, tanh: return_without_unit, arcsin: return_without_unit, arccos: return_without_unit, arctan: return_without_unit, arctan2:", "dataset to read from. If the dataset has a units", "out=out, **kwargs) if unit_operator in (multiply_units, divide_units): out, out_arr, unit", "given units. Parameters ---------- units : Unit object or str", "the same number of values. Parameters ---------- fname : str", "check for the correct (same dimension) units. \"\"\" ro =", "arrays. \"\"\" np_ret = super(YTArray, self).__reduce__() obj_state = np_ret[2] unit_state", "ds.arr(np.ones(5), 'code_length') >>> a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24])", "import _astropy from sympy import Rational from yt.units.unit_lookup_table import \\", "(input_array, input_units) ) if isinstance(input_array, YTArray): ret = input_array.view(cls) if", "@classmethod def from_hdf5(cls, filename, dataset_name=None, group_name=None): r\"\"\"Attempts read in and", "= sanitize_units_add(self, left_object, \"addition\") return super(YTArray, self).__radd__(lo) def __iadd__(self, other):", "arrs[1:]): raise RuntimeError(\"Your arrays must have identical units.\") v.units =", "an old pickle file # created before we serialized the", "operator. Must check for the correct (same dimension) units. \"\"\"", "cls1 if issubclass(cls1, YTQuantity): return cls2 if issubclass(cls2, YTQuantity): return", "convert to \" + \"an AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs)", "\"h\" if unit_str == \"h\": unit_str = \"hr\" ap_units.append(\"%s**(%s)\" %", "# We first cast to be our class type obj", "annoying to deal with them. 
>>> np.log10(a) array([ -inf, 0.", "np.subtract(new_array, offset*new_array.uq, new_array) return new_array else: return self.to_equivalent(units, equivalence, **kwargs)", "header : str, optional String that will be written at", "filename, dataset_name=None, info=None, group_name=None): r\"\"\"Writes a YTArray to hdf5 file.", "not power: unit2 = Unit(registry=getattr(unit1, 'registry', None)) elif ufunc is", "elif len(inputs) == 2: unit_operator = self._ufunc_registry[ufunc] inps, units, ret_class", "def __getitem__(self, item): ret = super(YTArray, self).__getitem__(item) if ret.shape ==", "be the first dimension and if ``axis=-1`` it will be", "this instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\" % (input_array, input_units) ) if", "__repr__(self): return str(self) def validate_numpy_wrapper_units(v, arrs): if not any(isinstance(a, YTArray)", "units = [] num_cols = -1 for line in f.readlines():", "where appropriate. >>> import numpy as np >>> a =", "may produce corrupted, invalid units or array data, but can", "and registry is not input_units.registry: units = Unit(str(input_units), registry=registry) else:", "http://docs.python.org/2/library/pickle.html Unit metadata is encoded in the zeroth element of", "the documentation of numpy.cross for full details. \"\"\" v =", "not in the same units. Parameters ---------- unit : string", ": A UnitRegistry object The registry to create units from.", "preserves units. \"\"\" v = np.stack(arrs) v = validate_numpy_wrapper_units(v, arrs)", "bytes)): if input_units.startswith('code_'): raise UnitParseError( \"Code units used without referring", "super(YTArray, self).__mul__(ro) def __rmul__(self, left_object): \"\"\" See __mul__. 
\"\"\" lo", "greater_equal, less, less_equal, not_equal, equal, logical_and, logical_or, logical_xor, maximum, minimum,", "yt.utilities.on_demand_imports import _h5py as h5py from yt.extern.six.moves import cPickle as", "Powers must be specified using python syntax (cm**3, not cm^3).", "if cls1 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls1, (numeric_type, np.number,", "conversion_factor, new_units) if offset: np.subtract(new_array, offset*new_array.uq, new_array) return new_array else:", "dataset = g[dataset_name] data = dataset[:] units = dataset.attrs.get('units', '')", "__ne__(self, other): \"\"\" Test if this is not equal to", "else: if hasattr(self, 'units'): ret.units = self.units return ret #", "delimiter : str, optional String or character separating columns. header", "attribute set to the sympy expression 1/1 rather than #", "if self.units.is_dimensionless and power == -1: ret = super(YTArray, self).__pow__(power)", "cm >>> b + a YTArray([ 4.01, 5.02, 6.03]) m", "Arrays will be \" \"dimensionless!\") units = [\"dimensionless\"]*num_cols arrays =", "np.array(out_arr, copy=False) return out_arr out_arr.units = unit if out_arr.size ==", "or this_equiv._one_way) if self.has_equivalent(equiv) and oneway_or_equivalent: new_arr = this_equiv.convert( self,", "object or str The units you want to convert to.", "wrapper around np.dot that preserves units. \"\"\" dot = np.dot(op1.d,", "= context[1] if ufunc in unary_operators: out_arr, inp, u =", "divmod_, isnat, heaviside except ImportError: positive, divmod_, isnat, heaviside =", "units of the quantity. 
Powers must be specified using python", "operators are not defined for YTArray instances\") def get_inp_u_unary(ufunc, inputs,", "return_without_unit, arctan2: arctan2_unit, arcsinh: return_without_unit, arccosh: return_without_unit, arctanh: return_without_unit, hypot:", "happens when we try to load an old pickle file", "ufunc in (modf, divmod_): out_arr = tuple((ret_class(o, unit) for o", "from yt.utilities.lru_cache import lru_cache from numbers import Number as numeric_type", "create a copy of the data in the iterable. return", "through units where appropriate. >>> import numpy as np >>>", "ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.arr(np.ones(5), 'code_length') >>> a.in_cgs()", "ceil: passthrough_unit, trunc: passthrough_unit, spacing: passthrough_unit, positive: passthrough_unit, divmod_: passthrough_unit,", "spacing try: # numpy 1.13 or newer from numpy import", "negative, absolute, rint, sign, conj, exp, exp2, log, log2, log10,", "try the :meth:`list_equivalencies` method. Default: None Returns ------- YTArray \"\"\"", "units, registry=registry) return arr def uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find the", "self.units @return_arr def sum(self, axis=None, dtype=None, out=None): return super(YTArray, self).sum(axis,", "inp2), (unit1, unit2), ret_class def handle_preserve_units(inps, units, ufunc, ret_class): if", "0.30103 , 0.47712125, 0.60205999, 0.69897 , 0.77815125, 0.84509804]) YTArray is", "= coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # If the other object", "a YTQuantity with # size > 1 return YTArray(np.array(out_arr), unit)", "subclass that attaches a symbolic unit object to the array", "written at the end of the file. comments : str,", "or YTQuantity. \"\"\" self.units.list_equivalencies() def has_equivalent(self, equiv): \"\"\" Check to", "(multiply, divide) and method == 'reduce': power_sign = POWER_SIGN_MAPPING[ufunc] if", "of arrays and quantities. 
\"\"\" if memodict is None: memodict", "ret.view(YTArray) if context is None: if ret.shape == (): return", "the object on the right of the `*` operator. The", "ndarray instance # We first cast to be our class", "been\" \"added to YTArray.\" % (str(ufunc), len(inputs))) if unit is", "arctan2_unit): inps, units = handle_comparison_units( inps, units, ufunc, ret_class) elif", "yt.YTArray([2, 3, 4], 'cm') >>> uintersect1d(A, B) YTArray([ 2., 3.])", "AstroPy Quantity The Quantity to convert from. unit_registry : yt", "self.units return ret # # Start operation methods # if", "= np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit = Unit(registry=unit.registry) return out, out_arr,", "= YTQuantity(np.asarray(out_arr), unit) else: if ret_class is YTQuantity: # This", "unit symbol lookup table # into the pickle file unit,", "format changed if len(lut['m']) == 2: lut.update(default_unit_symbol_lut) for k, v", "raise YTUnitOperationError(ufunc, *units) else: if raise_error: raise YTUfuncUnitError(ufunc, *units) inps", "not np.any(other_object): return ret.view(np.ndarray) elif not np.any(this_object): return ret raise", "with the same unit as this array and a value", "array \"\"\" if units is None: v = self.value else:", "unique elements of the two input arrays. A wrapper around", "the `/` operator. \"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray,", "= unit_array def __getitem__(self, item): ret = super(YTArray, self).__getitem__(item) if", "This is a wrapper around np.hstack that preserves units. 
\"\"\"", "yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.arr(np.ones(5), 'code_length')", "= True else: # Here we catch the first line", "= yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit = Unit(unit, registry=self.units.registry)", "= input_array.view(cls) if input_units is None: if registry is None:", "index of the new axis in the dimensions of the", ") if any([ff != getattr(_, 'units', NULL_UNIT) for _ in", "getattr(ufunc, method) if 'out' in kwargs: out_orig = kwargs.pop('out') out", "specified, this will be used instead of the registry associated", "order='C'): return type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self, obj): if obj is", "ret.units = input_array.units else: units = Unit(str(input_array.units), registry=registry) ret.units =", "return YTArray(norm, data.units) def udot(op1, op2): \"\"\"Matrix or vector dot", "YTArray([4, 3, 2, 1, 0, 1, 2, 3]) g/cm**3 and", "Test if this is less than the object on the", "single format (%10.5f), or a sequence of formats. delimiter :", "elif out_arr.size == 1: out_arr = YTQuantity(np.asarray(out_arr), unit) else: if", "units, and returns it without units. Output is therefore a", "bare NumPy array. Optionally, an equivalence can be specified to", "YTArray(np.ndarray): \"\"\" An ndarray subclass that attaches a symbolic unit", "* arr2.units arr = YTArray(v, units, registry=registry) return arr def", "# @return_arr def prod(self, axis=None, dtype=None, out=None): if axis is", "logical_xor: comparison_unit, logical_not: return_without_unit, maximum: preserve_units, minimum: preserve_units, fmax: preserve_units,", "The string used to separate values. By default, this is", "units as well. group_name: string An optional group to read", "reciprocal, sin, cos, tan, arcsin, arccos, arctan, sinh, cosh, tanh,", "rather than # a dimensionless Unit object. 
if self.units.is_dimensionless and", "= [arrays] units = [] for array in arrays: if", "\"\"\" Subtract the object on the right of the `-`", "from numpy import \\ add, subtract, multiply, divide, logaddexp, logaddexp2,", "is less than or equal to the object on the", "YTArray(np.arange(8) - 4, 'g/cm**3') >>> np.abs(a) YTArray([4, 3, 2, 1,", "log2, log10, expm1, log1p, sqrt, square, \\ reciprocal, sin, cos,", "bitwise_xor, invert, left_shift, right_shift, \\ greater, greater_equal, less, less_equal, not_equal,", "= sanitize_units_add(self, right_object, \"subtraction\") return super(YTArray, self).__sub__(ro) def __rsub__(self, left_object):", "+ k.replace('_', '\\ ') + '}') registry = UnitRegistry(lut=lut, add_default_symbols=False)", "input arrays must have the same units. See the documentation", "return_without_unit, cosh: return_without_unit, tanh: return_without_unit, arcsin: return_without_unit, arccos: return_without_unit, arctan:", "sanitize_units_mul(self, right_object) return super(YTArray, self).__div__(ro) def __rdiv__(self, left_object): \"\"\" See", "= out.flat[:] if isinstance(out_orig[0], YTArray): out_orig[0].units = unit return out_arr", "units or array data, but can lead to significant speedups", "__div__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rfloordiv__(lo) def", "if dtype is None: dtype = getattr(input_array, 'dtype', np.float64) if", "in g.keys(): d = g[dataset_name] # Overwrite without deleting if", "of the dataset to read from. If the dataset has", "\"\"\"Get a YTQuantity with the same unit as this array", "yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere stuff', delimiter=\"\\t\") \"\"\" if not isinstance(arrays,", "the equivalent mks units, and returns it. Returns ------- Quantity", "and returns it without units. 
Output is therefore a bare", "= Unit(registry=unit.registry) return out, out_arr, unit def coerce_iterable_units(input_object): if isinstance(input_object,", "the data in the supplied units, and returns it without", "iterable(input_array) and input_array: if isinstance(input_array[0], YTArray): return YTArray(np.array(input_array, dtype=dtype), input_array[0].units,", "to do something like this instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\" %", "# dimensionless or filled with zeros if not inp.units.is_dimensionless and", "less than the object on the right. \"\"\" # converts", "(str(ufunc), len(inputs))) if unit is None: out_arr = np.array(out_arr, copy=False)", "are datasets at the top level by default. Examples --------", "if hasattr(array, \"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\") if header != '':", "( conv_unit.has_equivalent(equiv) or this_equiv._one_way) if self.has_equivalent(equiv) and oneway_or_equivalent: new_arr =", "requires them. Parameters ---------- units : Unit object or string", "\"\"\" Divide this YTArray by the object on the right", "than or equal to other. \"\"\" # Check that the", "for this unitful quantity, try the :meth:`list_equivalencies` method. Examples --------", "the dataset to create in the file. info: dictionary A", "def unit_array(self): \"\"\"Get a YTArray filled with ones with the", "raise YTUnitOperationError(op_string, inp.units, dimensionless) return ret def validate_comparison_units(this, other, op_string):", "input_units='') return super(YTArray, self).__pow__(power) def __abs__(self): \"\"\" Return a YTArray", "would be annoying to deal with them. >>> np.log10(a) array([", "a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit = Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return", "None: # self is a YTArray, so it can't be", "whitespace. 
usecols : sequence, optional Which columns to read, with", "YTEquivalentDimsError from yt.utilities.lru_cache import lru_cache from numbers import Number as", "= 2.0 def __new__(cls, input_array, input_units=None, registry=None, dtype=None, bypass_validation=False): if", "to mark them as comments. Default: '# ', as expected", "lut if the pickle was saved prior to PR #1728", "mark them as comments. Default: '# ', as expected by", "def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None): \"\"\"Applies the", "---------- units : Unit object or str The units you", "\"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self,", "numpy version equal to or newer than 1.13 def __array_ufunc__(self,", "'+self.units.__repr__() def __str__(self): \"\"\" \"\"\" return str(self.view(np.ndarray)) + ' '", "the dataset has a units attribute, attempt to infer units", "def __iadd__(self, other): \"\"\" See __add__. \"\"\" oth = sanitize_units_add(self,", "# a dimensionless Unit object. if self.units.is_dimensionless and power ==", "offset) = self.units.get_conversion_factor(new_units) self.units = new_units values = self.d values", "as h5py from yt.extern.six.moves import cPickle as pickle if info", "unit = u**(power_sign*inp.shape[kwargs['axis']]) else: unit = u**(power_sign*inp.size) else: unit =", "cm \"\"\" v = np.intersect1d(arr1, arr2, assume_unique=assume_unique) v = validate_numpy_wrapper_units(v,", "``header`` and ``footer`` strings, to mark them as comments. Default:", "(%10.5f), or a sequence of formats. 
delimiter : str, optional", "# # # Begin reduction operators # @return_arr def prod(self,", "in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls1, (numeric_type, np.number, list, tuple)):", "to create and write a dataset to dataset_name: string The", "case of adding or subtracting with zero or # array", "input_object]): raise YTIterableUnitCoercionError(input_object) # This will create a copy of", "See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.floor_divide(self, oth, out=self)", "a = YTQuantity(12, 'g/cm**3') >>> np.abs(a) 12 g/cm**3 and strip", "the given units. Parameters ---------- units : Unit object or", "\\ YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache import lru_cache", "import positive, divmod as divmod_, isnat, heaviside except ImportError: positive,", "input_units *must* be a valid unit object. Defaults to False.", "vertically (row wise) while preserving units This is a wrapper", "for col in usecols] mylog.info(\"Array units: %s\" % \", \".join(units))", "YTArray([ 1., 2., 3., 2., 3., 4.]) cm \"\"\" v", ": str, optional String that will be written at the", ": an integer or floating point scalar The scalar to", "if issubclass(cls1, cls2): return cls1 if issubclass(cls2, cls1): return cls2", "out_orig[0].units = unit return out_arr def copy(self, order='C'): return type(self)(np.copy(np.asarray(self)),", "units[0] != units[1]: u1d = units[0].is_dimensionless u2d = units[1].is_dimensionless any_nonzero", "yt.funcs def iterable(obj): try: len(obj) except: return False return True", "None def invert_units(unit): raise TypeError( \"Bit-twiddling operators are not defined", "yt.units.unit_object import Unit, UnitParseError from yt.units.unit_registry import UnitRegistry from yt.units.dimensions", "from sympy import Rational from yt.units.unit_lookup_table import \\ default_unit_symbol_lut from", "new_units values = self.d values *= conversion_factor if 
offset: np.subtract(self,", "for array in arrays: if hasattr(array, \"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\")", "f[dataset_name] d = g.create_dataset(dataset_name, data=self) else: d = g.create_dataset(dataset_name, data=self)", "v def unorm(data, ord=None, axis=None, keepdims=False): \"\"\"Matrix or vector norm", "\"SI\" else: base = \"CGS\" raise YTEquivalentDimsError(my_units, other_units, base) if", "units. See the documentation of numpy.intersect1d for full details. Examples", "will be the last dimension. This is a wrapper around", "in f.readlines(): words = line.strip().split() if len(words) == 0: continue", "# converts if possible oth = validate_comparison_units(self, other, 'less_than') return", "= input_units else: # units kwarg set, but it's not", "YTQuantity: # This happens if you do ndarray * YTQuantity.", "= YTArray.__new__(cls, input_scalar, input_units, registry, dtype=dtype, bypass_validation=bypass_validation) if ret.size >", "newer from numpy import positive, divmod as divmod_, isnat, heaviside", "lo = sanitize_units_add(self, left_object, \"subtraction\") return super(YTArray, self).__rsub__(lo) def __isub__(self,", "preserve_units, subtract: preserve_units, multiply: multiply_units, divide: divide_units, logaddexp: return_without_unit, logaddexp2:", "type(self)(ret, input_units='') return super(YTArray, self).__pow__(power) def __abs__(self): \"\"\" Return a", "be specified using python syntax (cm**3, not cm^3). registry :", "Examples -------- >>> a = YTArray([1,2,3], 'cm') >>> myinfo =", "single YTArray The array(s) to write to the file. fmt", "def uunion1d(arr1, arr2): \"\"\"Find the union of two arrays. 
A", "Parameters ---------- input_scalar : an integer or floating point scalar", "bitwise_or, bitwise_xor, left_shift, right_shift, greater, greater_equal, less, less_equal, not_equal, equal,", "arr if isinstance(x, np.ndarray): return data.ds.arr(x, units) else: return data.ds.quan(x,", "units you want to get the bare quantity in. If", "The unit that you wish to convert to. equiv :", "from yt.units.unit_object import Unit, UnitParseError from yt.units.unit_registry import UnitRegistry from", "mod: preserve_units, fmod: preserve_units, absolute: passthrough_unit, fabs: passthrough_unit, rint: return_without_unit,", "'cm') >>> uintersect1d(A, B) YTArray([ 2., 3.]) cm \"\"\" v", "= np.hstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def ustack(arrs,", "self).__rxor__(left_object) def __ixor__(self, other): np.bitwise_xor(self, other, out=self) return self def", "Convert the array and units to the equivalent mks units.", "A tuple, list, or array to attach units to input_units", "for _ in input_object]): raise YTIterableUnitCoercionError(input_object) # This will create", "rad2deg, invert, logical_not, isreal, iscomplex, isfinite, isinf, isnan, signbit, floor,", "row in the text file must have the same number", "else: return input_object def sanitize_units_mul(this_object, other_object): inp = coerce_iterable_units(this_object) ret", "let the Unit class handle if # it's a str.", ">>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\" Convert the", "return ret elif isinstance(input_array, np.ndarray): pass elif iterable(input_array) and input_array:", "bypass_validation=bypass_validation) if ret.size > 1: raise RuntimeError(\"YTQuantity instances must be", "def preserve_units(unit1, unit2=None): return unit1 @lru_cache(maxsize=128, typed=False) def power_unit(unit, power):", "sequence of formats. 
delimiter : str, optional String or character", "def savetxt(fname, arrays, fmt='%.18e', delimiter='\\t', header='', footer='', comments='#'): r\"\"\" Write", "necessary for stdlib deepcopy of arrays and quantities. \"\"\" if", "out=self) return self def __and__(self, right_object): return super(YTArray, self).__and__(right_object) def", "but as an ndarray rather than ytarray. Returns ------- View", "= sanitize_units_mul(self, other) np.divide(self, oth, out=self) return self def __truediv__(self,", "else: return new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv, self.units, unit) def list_equivalencies(self):", "------- NumPy array \"\"\" if units is None: v =", "= YTArray(np.arange(8) - 4, 'g/cm**3') >>> np.abs(a) YTArray([4, 3, 2,", "pickle format changed if len(lut['m']) == 2: lut.update(default_unit_symbol_lut) for k,", "\"\"\" Posify the data. \"\"\" # this needs to be", "than # a dimensionless Unit object. if self.units.is_dimensionless and power", "self for k in d.attrs.keys(): del d.attrs[k] else: del f[dataset_name]", "------- YTArray \"\"\" if equivalence is None: new_units = _unit_repr_check_same(self.units,", "mks units, and returns it. Returns ------- Quantity object with", "other): np.bitwise_xor(self, other, out=self) return self def __and__(self, right_object): return", "# If the other object is a YTArray and has", "Check that other is a YTArray. if hasattr(other, 'units'): if", "yt unit registry to use in the conversion. If one", "that preserves units. 
\"\"\" v = np.hstack(arrs) v = validate_numpy_wrapper_units(v,", "unit def coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray): return input_object if iterable(input_object):", "str, optional String that will be written at the end", "= Unit() POWER_SIGN_MAPPING = {multiply: 1, divide: -1} # redefine", "= input_array.units else: units = Unit(str(input_array.units), registry=registry) ret.units = units", "lut = str(state[0]), default_unit_symbol_lut.copy() # need to fix up the", "unit objects handle being multiplied. \"\"\" ro = sanitize_units_mul(self, right_object)", "if group_name is not None: if group_name in f: g", "return_without_unit, floor: passthrough_unit, ceil: passthrough_unit, trunc: passthrough_unit, spacing: passthrough_unit, positive:", "import Number as numeric_type from yt.utilities.on_demand_imports import _astropy from sympy", "units elif isinstance(input_units, Unit): ret.units = input_units else: ret.units =", "def __ior__(self, other): np.bitwise_or(self, other, out=self) return self def __xor__(self,", "right_object) return super(YTArray, self).__truediv__(ro) def __rtruediv__(self, left_object): \"\"\" See __div__.", "yt UnitRegistry object. Examples -------- >>> a = YTQuantity(4.0, \"cm**2/s\")", "arguments. The keepdims argument is ignored if the version of", "UnitRegistry >>> import numpy as np >>> ureg = UnitRegistry()", "self).__array_wrap__(out_arr, context) if isinstance(ret, YTQuantity) and ret.shape != (): ret", "ret # # Start operation methods # if LooseVersion(np.__version__) <", "ndview(self): \"\"\"Get a view of the array data.\"\"\" return self.ndarray_view()", "right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self,", "extract the 2nd, 5th and 6th columns. 
The default, None,", "# # The full license is in the file COPYING.txt,", "type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self, obj): if obj is None and", "'dtype', np.float64) if bypass_validation is True: obj = np.asarray(input_array, dtype=dtype).view(cls)", "... info=myinfo) \"\"\" from yt.utilities.on_demand_imports import _h5py as h5py from", "used if that particular equivalency requires them. Parameters ---------- units", "def __array_wrap__(self, out_arr, context=None): ret = super(YTArray, self).__array_wrap__(out_arr, context) if", "#----------------------------------------------------------------------------- # Copyright (c) 2013, yt Development Team. # #", "__rand__(self, left_object): return super(YTArray, self).__rand__(left_object) def __iand__(self, other): np.bitwise_and(self, other,", "in __reduce__ and then serialized by pickle. \"\"\" super(YTArray, self).__setstate__(state[1:])", "exp2, log, log2, log10, expm1, log1p, sqrt, square, \\ reciprocal,", "not cm^3). registry : ~yt.units.unit_registry.UnitRegistry The registry to create units", "unit is None: out_arr = np.array(out_arr, copy=False) return out_arr out_arr.units", "col_words: float(word) num_cols = len(col_words) break except ValueError: mylog.warning(\"Unrecognized character", "will be used. NOTE: This is not the same as", "ret = super(YTArray, self).__getitem__(item) if ret.shape == (): return YTQuantity(ret,", "to the given units. Parameters ---------- units : Unit object", "zero or # array filled with zero if not np.any(other_object):", "unit2=None): return None def arctan2_unit(unit1, unit2): return NULL_UNIT def comparison_unit(unit1,", "greater: comparison_unit, greater_equal: comparison_unit, less: comparison_unit, less_equal: comparison_unit, not_equal: comparison_unit,", "YTArrays with unit information to a text file. 
Parameters ----------", "isinstance(input_array, YTArray): ret = input_array.view(cls) if input_units is None: if", "and check that it is compatible with this quantity. Returns", "supplied units, and returns it. Optionally, an equivalence can be", "NumPy ufuncs will pass through units where appropriate. >>> import", "other_units.dimensions) return other_units unary_operators = ( negative, absolute, rint, sign,", "if unit.is_dimensionless and unit.base_value != 1.0: if not units[0].is_dimensionless: if", "bs = convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs, Rational(exponent))) p_units = \"*\".join(p_units)", "if other is None: return True oth = validate_comparison_units(self, other,", "else: norm = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims) if norm.shape ==", "\"\"\" Creates a new AstroPy quantity with the same unit", ">>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... info=myinfo) \"\"\" from yt.utilities.on_demand_imports import _h5py", ">>> import numpy as np >>> a = YTArray(np.arange(8) -", "this array with the data in the supplied units, and", "Test if this is not equal to the object on", "def __add__(self, right_object): \"\"\" Add this ytarray to the object", "import ytLogger as mylog from .pint_conversions import convert_pint_units NULL_UNIT =", "[arrays] units = [] for array in arrays: if hasattr(array,", "be scalars\") return ret def __repr__(self): return str(self) def validate_numpy_wrapper_units(v,", "YTArray. if hasattr(other, 'units'): if this.units.expr is other.units.expr: if this.units.base_value", "2: lut.update(default_unit_symbol_lut) for k, v in [(k, v) for k,", "this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units) return other.in_units(this.units) return other @lru_cache(maxsize=128,", "units where appropriate. 
>>> import numpy as np >>> a", "'\\n' header += \" Units\\n \" + '\\t'.join(units) np.savetxt(fname, np.transpose(arrays),", "sin, cos, tan, arcsin, arccos, arctan, sinh, cosh, tanh, arcsinh,", "something that is related by only a constant factor but", "array. Powers must be specified using python syntax (cm**3, not", "_h5py as h5py from yt.extern.six.moves import cPickle as pickle if", "inps, units def handle_comparison_units(inps, units, ufunc, ret_class, raise_error=False): if units[0]", "if out_arr.size == 1: return YTQuantity(np.array(out_arr), unit) else: if ret_class", "* YTQuantity. Explicitly # casting to YTArray avoids creating a", "(): return YTQuantity(dot, units) return YTArray(dot, units) def uvstack(arrs): \"\"\"Stack", "def __rxor__(self, left_object): return super(YTArray, self).__rxor__(left_object) def __ixor__(self, other): np.bitwise_xor(self,", "= dataset[:] units = dataset.attrs.get('units', '') if 'unit_registry' in dataset.attrs.keys():", "\"\"\" ro = sanitize_units_add(self, right_object, \"subtraction\") return super(YTArray, self).__sub__(ro) def", "= getattr(inp, 'units', None) if u is None: u =", "None: dtype = getattr(input_array, 'dtype', np.float64) if bypass_validation is True:", "np.log10(a) array([ -inf, 0. , 0.30103 , 0.47712125, 0.60205999, 0.69897", "+ unit_state + np_ret[3:] return new_ret def __setstate__(self, state): \"\"\"Pickle", "unit def return_without_unit(unit, unit2=None): return None def arctan2_unit(unit1, unit2): return", "from this ytarray. Must check for the correct (same dimension)", "+= \" Units\\n \" + '\\t'.join(units) np.savetxt(fname, np.transpose(arrays), header=header, fmt=fmt,", "a copy of the array data as a numpy ndarray\"\"\"", "the data. \"\"\" return super(YTArray, self).__abs__() # # Start comparison", "out_arr = np.array(out_arr, copy=False) elif ufunc in (modf, divmod_): out_arr", "level by default. 
\"\"\" import h5py from yt.extern.six.moves import cPickle", "arr2, assume_unique=False): \"\"\"Find the sorted unique elements of the two", "we try to load an old pickle file # created", "Examples -------- >>> A = yt.YTArray([1, 2, 3], 'cm') >>>", "other, 'less_than or equal') return super(YTArray, self).__le__(oth) def __eq__(self, other):", "that system's base units. Parameters ---------- unit_system : string, optional", "to fix up the lut if the pickle was saved", "if current_mks in equiv_dims.free_symbols: base = \"SI\" else: base =", "return new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv, self.units, unit) def list_equivalencies(self): \"\"\"", "Parameters ---------- unit_system : string, optional The unit system to", "\"h\": unit_str = \"hr\" ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent))) ap_units =", "them as comments. Default: '# ', as expected by e.g.", "Parameters ---------- fname : str The file to write the", "the metadata extracted in __reduce__ and then serialized by pickle.", "def __new__(cls, input_scalar, input_units=None, registry=None, dtype=np.float64, bypass_validation=False): if not isinstance(input_scalar,", "add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide, power, remainder, mod,", "cls2 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls2, (numeric_type, np.number, list,", "= value @property def ndview(self): \"\"\"Get a view of the", "specified unit system, and returns it in that system's base", "significant speedups in the input validation logic adds significant overhead.", "power.unit) # Work around a sympy issue (I think?) #", "def __xor__(self, right_object): return super(YTArray, self).__xor__(right_object) def __rxor__(self, left_object): return", "equivalent mks units. \"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units, equivalence=None,", "has an equivalent unit in *equiv*. 
\"\"\" return self.units.has_equivalent(equiv) def", "raise YTUfuncUnitError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps,", "yt import YTArray >>> a = YTArray([1, 2, 3], 'cm')", "A single format (%10.5f), or a sequence of formats. delimiter", "if isinstance(x, YTArray): arr = copy.deepcopy(x) arr.convert_to_units(units) return arr if", "for stdlib deepcopy of arrays and quantities. \"\"\" if memodict", "np.multiply(self, oth, out=self) return self def __div__(self, right_object): \"\"\" Divide", "the file. fmt : str or sequence of strs, optional", "equivalent to: >>> b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>> np.all(a", "= YTQuantity(2.5, \"erg/s\") >>> E_new = E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system))", "The default, None, results in all columns being read. comments", "read. dtype : data-type, optional Data-type of the resulting array;", "= super(YTArray, self).__pow__(power) return type(self)(ret, input_units='') return super(YTArray, self).__pow__(power) def", "hdf5 file into a YTArray. Parameters ---------- filename: string The", "return self def __sub__(self, right_object): \"\"\" Subtract the object on", "logaddexp2, true_divide, \\ floor_divide, negative, power, remainder, mod, absolute, rint,", "np.vstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def uhstack(arrs): \"\"\"Stack", "if unit_operator in (preserve_units, comparison_unit, arctan2_unit): inps, units = handle_comparison_units(", "return self.in_units(units, equivalence=equivalence, **kwargs) def to_value(self, units=None, equivalence=None, **kwargs): \"\"\"", "Write YTArrays with unit information to a text file. 
Parameters", "data._determine_fields(field)[0] if isinstance(field, tuple): finfo = data.ds._get_field_info(field[0],field[1]) else: finfo =", "\"\"\" Convert a YTArray or YTQuantity to an equivalent, e.g.,", "oth = validate_comparison_units(self, other, 'greater than') return super(YTArray, self).__gt__(oth) #", "cls2)) def loadtxt(fname, dtype='float', delimiter='\\t', usecols=None, comments='#'): r\"\"\" Load YTArrays", "operation for a YTArray subclass. \" \"Received operand types (%s)", "-------- >>> from yt import YTArray >>> a = YTArray([1,", "the default one will be used. \"\"\" # Converting from", "np.ndarray): return input_object if iterable(input_object): if any([isinstance(o, YTArray) for o", "if units[0] != units[1]: any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0]", "elif ufunc is power: unit2 = inp2 if isinstance(unit2, np.ndarray):", "return super(YTArray, self).__sub__(ro) def __rsub__(self, left_object): \"\"\" See __sub__. \"\"\"", "def __and__(self, right_object): return super(YTArray, self).__and__(right_object) def __rand__(self, left_object): return", "def __pos__(self): \"\"\" Posify the data. \"\"\" # this needs", "\"\"\" See __sub__. \"\"\" oth = sanitize_units_add(self, other, \"subtraction\") np.subtract(self,", "return cls2 if cls2 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls2,", "lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rtruediv__(lo) def __itruediv__(self, other):", "square: square_unit, reciprocal: reciprocal_unit, sin: return_without_unit, cos: return_without_unit, tan: return_without_unit,", "at the beginning of the file, before the unit header.", "equivalent base units in the specified unit system. Parameters ----------", "g.create_dataset(dataset_name, data=self) else: d = g.create_dataset(dataset_name, data=self) for k, v", "This could be a subclass, so don't call YTArray directly.", "the array data. 
Parameters ---------- input_array : :obj:`!iterable` A tuple,", "yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\" f = open(fname, 'r') next_one =", "or astropy units The units of the quantity. Powers must", "def __repr__(self): return str(self) def validate_numpy_wrapper_units(v, arrs): if not any(isinstance(a,", "= yt.YTArray([2, 3, 4], 'cm') >>> uintersect1d(A, B) YTArray([ 2.,", "to do this because AstroPy is silly and defines #", "0.77815125, 0.84509804]) YTArray is tightly integrated with yt datasets: >>>", "or incomplete units header. Arrays will be \" \"dimensionless!\") units", "a copy of the data in the iterable. return YTArray(input_object)", "units used without referring to a dataset. \\n\" \"Perhaps you", "self.units.expr.as_powers_dict() units = [] for unit, pow in powers_dict.items(): #", "filename, dataset_name=None, group_name=None): r\"\"\"Attempts read in and convert a dataset", "is a wrapper around np.hstack that preserves units. \"\"\" v", "delimiter='\\t', usecols=None, comments='#'): r\"\"\" Load YTArrays with unit information from", "arctan, sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, invert,", "# we have to do this because Pint doesn't recognize", "= YTArray([1,2,3], 'cm') >>> myinfo = {'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5',", "return super(YTArray, self).sum(axis, dtype, out), self.units @return_arr def std(self, axis=None,", "power_sign = POWER_SIGN_MAPPING[ufunc] if 'axis' in kwargs and kwargs['axis'] is", "YTUnitOperationError(op_string, inp.units, dimensionless) return ret def validate_comparison_units(this, other, op_string): #", "The pow value. \"\"\" if isinstance(power, YTArray): if not power.units.is_dimensionless:", "= UnitRegistry() powers_dict = self.units.expr.as_powers_dict() units = [] for unit,", "this YTArray or YTQuantity. 
\"\"\" self.units.list_equivalencies() def has_equivalent(self, equiv): \"\"\"", "registry : A UnitRegistry object The registry to create units", "example, if ``axis=0`` it will be the first dimension and", "-------- >>> E = YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return", "(unit_str, Rational(exponent))) ap_units = \"*\".join(ap_units) if isinstance(arr.value, np.ndarray): return YTArray(arr.value,", "out, out_arr): if unit.is_dimensionless and unit.base_value != 1.0: if not", "filename to create and write a dataset to dataset_name: string", "str(unit).endswith(\"yr\") and len(str(unit)) in [2,3]: unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" %", "object or string The units you want to get a", "or equal') return super(YTArray, self).__le__(oth) def __eq__(self, other): \"\"\" Test", "preserves units. See the documentation of numpy.cross for full details.", "__itruediv__(self, other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other)", "units header. Arrays will be \" \"dimensionless!\") units = [\"dimensionless\"]*num_cols", "isinf, isnan, signbit, copysign, nextafter, \\ modf, ldexp, frexp, fmod,", "if isinstance(ret, YTArray): if not inp.units.same_dimensions_as(ret.units): # handle special case", "_, inp, u = get_inp_u_unary(ufunc, inputs) out_arr = func(np.asarray(inp), out=out,", "units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return", "Takes a Unit object, or string of known unit symbol,", "if not all(isinstance(a, YTArray) for a in arrs): raise RuntimeError(\"Not", "inp = inp.in_units('radian').v if out_arr is not None: out_arr =", "of the data. \"\"\" return super(YTArray, self).__abs__() # # Start", "input arrays. A wrapper around numpy.intersect1d that preserves units. All", "data. 
Defaults to the dtype of the input data, or,", "str(context[0])) if unit is None: out_arr = np.array(out_arr, copy=False) return", "to(self, units, equivalence=None, **kwargs): \"\"\" An alias for YTArray.in_units(). See", "it will be the last dimension. This is a wrapper", ">>> E_new = E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\"", "obj.units = input_units if registry is not None: obj.units.registry =", "super(YTArray, self).__array_wrap__(out_arr, context) if isinstance(ret, YTQuantity) and ret.shape != ():", "ufunc is not power: unit2 = Unit(registry=getattr(unit1, 'registry', None)) elif", "def convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert the array and units to", "unit_operator(*units) out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs) if unit_operator in", "left_object) return super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self, other): \"\"\" See __div__.", "'array_data' f = h5py.File(filename) if group_name is not None: g", "dtype=np.float64, bypass_validation=False): if not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)): raise RuntimeError(\"YTQuantity", "of arrays. This wrapper around numpy.concatenate preserves units. 
All input", "is None: memodict = {} ret = super(YTArray, self).__deepcopy__(memodict) return", "divide_units): out, out_arr, unit = handle_multiply_divide_units( unit, units, out, out_arr)", "= func(*args, **kwargs) if ret.shape == (): return YTQuantity(ret, units)", "out_arr is not None: out_arr = ufunc(inp).view(np.ndarray) return out_arr, inp,", "list of YTArrays or single YTArray The array(s) to write", "handle_comparison_units( inps, units, ufunc, ret_class, raise_error=True) unit = unit_operator(*units) if", "\"CGS\" raise YTEquivalentDimsError(my_units, other_units, base) if not my_units.same_dimensions_as(other_units): raise YTUnitConversionError(", "5.02, 6.03]) m NumPy ufuncs will pass through units where", "method. Examples -------- >>> a = yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\")", "None and hasattr(self, 'units'): return self.units = getattr(obj, 'units', NULL_UNIT)", "and units to the equivalent base units in the specified", "write_hdf5(self, filename, dataset_name=None, info=None, group_name=None): r\"\"\"Writes a YTArray to hdf5", "strs, optional A single format (%10.5f), or a sequence of", "a YTArray before we use the `units` # attribute. if", "= YTArray([1, 2, 3], 'cm') >>> b = YTArray([4, 5,", "Load YTArrays with unit information from a text file. Each", "(unit, Rational(pow))) units = \"*\".join(units) return unit_registry.Quantity(self.value, units) # #", "Parameters ---------- arr : YTArray or YTQuantity The unitful quantity", "An optional group to write the arrays to. If not", "passed to the equivalency, which should be used if that", "__gt__(self, other): \"\"\" Test if this is greater than the", "creating a YTQuantity with # size > 1 return YTArray(np.array(out_arr),", "method See the documentation for the standard library pickle module:", "from. If the dataset has a units attribute, attempt to", "file must have the same number of values. Parameters ----------", "String or character separating columns. 
header : str, optional String", "unit1 = Unit(registry=getattr(unit2, 'registry', None)) if unit2 is None and", "NULL_UNIT def comparison_unit(unit1, unit2=None): return None def invert_units(unit): raise TypeError(", "units[1]: any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False): units", "or subtracting with zero or # array filled with zero", "floor_divide: divide_units, negative: passthrough_unit, power: power_unit, remainder: preserve_units, mod: preserve_units,", "+ '\\t'.join(units) np.savetxt(fname, np.transpose(arrays), header=header, fmt=fmt, delimiter=delimiter, footer=footer, newline='\\n', comments=comments)", "out_arr.size == 1: return YTQuantity(np.array(out_arr), unit) else: if ret_class is", "strip them when it would be annoying to deal with", "create in the file. info: dictionary A dictionary of supplementary", ": string The unit that you wish to convert to.", "values = self.d values *= conversion_factor if offset: np.subtract(self, offset*self.uq,", "unit is None: out_arr = np.array(out_arr, copy=False) elif ufunc in", "len(v) == 2]: lut[k] = v + (0.0, r'\\rm{' +", "type(inp2)) if unit1 is None: unit1 = Unit(registry=getattr(unit2, 'registry', None))", "equal to the object on the right. \"\"\" # Check", "self.in_units(units, equivalence=equivalence, **kwargs) def to_value(self, units=None, equivalence=None, **kwargs): \"\"\" Creates", "Raise this YTArray to some power. Parameters ---------- power :", "sinh: return_without_unit, cosh: return_without_unit, tanh: return_without_unit, arcsin: return_without_unit, arccos: return_without_unit,", "yt Development Team. # # Distributed under the terms of", "be None. 
return False oth = validate_comparison_units(self, other, 'equal') return", "import _h5py as h5py from yt.extern.six.moves import cPickle as pickle", "right_object): return super(YTArray, self).__or__(right_object) def __ror__(self, left_object): return super(YTArray, self).__ror__(left_object)", "is None: # self is a YTArray, so it can't", "arctan, arctan2, \\ hypot, sinh, cosh, tanh, arcsinh, arccosh, arctanh,", "a YTArray. if other is None: # self is a", "operand types (%s) and (%s)\" % (cls1, cls2)) def loadtxt(fname,", "None) unit2 = getattr(inp2, 'units', None) ret_class = get_binary_op_return_class(type(inp1), type(inp2))", "YTQuantity. \"\"\" self.units.list_equivalencies() def has_equivalent(self, equiv): \"\"\" Check to see", "convert to an equivalent quantity which is not in the", "to_astropy(self, **kwargs): \"\"\" Creates a new AstroPy quantity with the", "filename to of the hdf5 file. dataset_name: string The name", "validate_numpy_wrapper_units(v, arrs) return v def uhstack(arrs): \"\"\"Stack arrays in sequence", "is cls2: return cls1 if cls1 in (np.ndarray, np.matrix, np.ma.masked_array)", "passthrough_unit, rint: return_without_unit, sign: return_without_unit, conj: passthrough_unit, exp: return_without_unit, exp2:", "a symbolic unit object to the array data. Parameters ----------", "\" \"Received operand types (%s) and (%s)\" % (cls1, cls2))", "is None: new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units)", "= getattr(input_object[0], 'units', NULL_UNIT, ) if any([ff != getattr(_, 'units',", "func = getattr(ufunc, method) if 'out' in kwargs: out_orig =", "dataset[:] units = dataset.attrs.get('units', '') if 'unit_registry' in dataset.attrs.keys(): unit_lut", "called inside pickle.read() and restores the unit data from the", "already a Unit obj. 
if not isinstance(other_units, Unit): other_units =", "v def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None): \"\"\"Applies", "def in_base(self, unit_system=\"cgs\"): \"\"\" Creates a copy of this array", "frexp, fmod, floor, ceil, trunc, fabs, spacing try: # numpy", "Examples -------- >>> temp, velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\"", "power: unit2 = Unit(registry=getattr(unit1, 'registry', None)) elif ufunc is power:", "np.dot(op1.d, op2.d) units = op1.units*op2.units if dot.shape == (): return", "input data, or, if none is found, uses np.float64 bypass_validation", "return self.ndarray_view() d = ndview @property def unit_quantity(self): \"\"\"Get a", "sanitize_units_mul(self, other) np.divide(self, oth, out=self) return self def __truediv__(self, right_object):", "been added \" \"to YTArray.\" % str(context[0])) if unit is", "# need to fix up the lut if the pickle", "return ret.view(np.ndarray) elif not np.any(this_object): return ret raise YTUnitOperationError(op_string, inp.units,", "cos, tan, arcsin, arccos, arctan, sinh, cosh, tanh, arcsinh, arccosh,", "it. Returns ------- Quantity object with data converted to mks", "yt.utilities.logger import ytLogger as mylog from .pint_conversions import convert_pint_units NULL_UNIT", "as mylog from .pint_conversions import convert_pint_units NULL_UNIT = Unit() POWER_SIGN_MAPPING", "a unit registry and this is specified, this will be", "or a sequence of formats. delimiter : str, optional String", "The unit objects handle being multiplied. \"\"\" ro = sanitize_units_mul(self,", "self.units.get_conversion_factor(new_units) self.units = new_units values = self.d values *= conversion_factor", "None: out_arr = np.array(out_arr, copy=False) return out_arr out_arr.units = unit", "and then serialized by pickle. 
\"\"\" super(YTArray, self).__setstate__(state[1:]) try: unit,", "else: out_arr = ret_class(np.asarray(out_arr), unit) if out is not None:", "\"erg/cm**3\") >>> c = yt.YTArray.from_pint(b) \"\"\" p_units = [] for", "Copyright (c) 2013, yt Development Team. # # Distributed under", "b 201.0 cm >>> b + a 2.01 m NumPy", "units, ufunc, ret_class) unit = unit_operator(*units) out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]),", "if isinstance(out_orig[0], YTArray): out_orig[0].units = unit return out_arr def copy(self,", "registry associated with the unit object. dtype : data-type The", "equivalent mks units, and returns it. Returns ------- Quantity object", "any(isinstance(a, YTArray) for a in arrs): return v if not", "\"\"\"Find the sorted unique elements of the two input arrays.", "= g.create_dataset(dataset_name, data=self) else: d = g.create_dataset(dataset_name, data=self) for k,", "AstroPy is silly and defines # hour as \"h\" if", "I don't do this, super(YTArray, self).__pow__ returns a YTArray #", "equiv_dims.free_symbols: base = \"SI\" else: base = \"CGS\" raise YTEquivalentDimsError(my_units,", "raise YTEquivalentDimsError(my_units, other_units, base) if not my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units,", "which is not in the same dimensions. .. note:: All", "obj. if not isinstance(other_units, Unit): other_units = Unit(other_units, registry=my_units.registry) equiv_dims", "less: comparison_unit, less_equal: comparison_unit, not_equal: comparison_unit, equal: comparison_unit, logical_and: comparison_unit,", "3], 'cm') >>> B = yt.YTArray([2, 3, 4], 'cm') >>>", "columns. header : str, optional String that will be written", "= coerce_iterable_units(other_object) # If the other object is a YTArray", "up the lut if the pickle was saved prior to", "divide, logaddexp, logaddexp2, true_divide, \\ floor_divide, negative, power, remainder, mod,", "current units. 
equivalence : string, optional The equivalence you wish", "arccos, arctan, sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg,", "are not defined for YTArray instances\") def get_inp_u_unary(ufunc, inputs, out_arr=None):", "vector norm that preserves units This is a wrapper around", "norm.shape == (): return YTQuantity(norm, data.units) return YTArray(norm, data.units) def", "np.add(self, oth, out=self) return self def __sub__(self, right_object): \"\"\" Subtract", "This is called inside pickle.read() and restores the unit data", "syntax (cm**3, not cm^3). registry : ~yt.units.unit_registry.UnitRegistry The registry to", "preserves units This is a wrapper around np.dot that preserves", "true_divide: divide_units, floor_divide: divide_units, negative: passthrough_unit, power: power_unit, remainder: preserve_units,", "unit2=None): return None def invert_units(unit): raise TypeError( \"Bit-twiddling operators are", "object. Examples -------- >>> a = YTQuantity(4.0, \"cm**2/s\") >>> b", "has not been added \" \"to YTArray.\" % str(context[0])) if", "= yt.YTArray([2, 3, 4], 'cm') >>> uunion1d(A, B) YTArray([ 1.,", "< LooseVersion('1.10.0'): norm = np.linalg.norm(data, ord=ord, axis=axis) else: norm =", "@property def ndview(self): \"\"\"Get a view of the array data.\"\"\"", "else: return ret ufunc = context[0] inputs = context[1] if", "return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\" Convert the array and units", "see which equivalencies are supported for this unitful quantity, try", "See the documentation for that function for descriptions of the", "could be a subclass, so don't call YTArray directly. 
return", "_ufunc_registry = { add: preserve_units, subtract: preserve_units, multiply: multiply_units, divide:", "the pickle was saved prior to PR #1728 # when", "g.create_dataset(dataset_name, data=self) for k, v in info.items(): d.attrs[k] = v", "isinstance(input_units, Unit): ret.units = input_units else: ret.units = Unit(input_units, registry=registry)", "return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def from_pint(cls, arr, unit_registry=None): \"\"\" Convert", "usecols=None, comments='#'): r\"\"\" Load YTArrays with unit information from a", "= (units[1], units[1]) elif any_nonzero[1] == np.bool_(False): units = (units[0],", "u = get_inp_u_unary(ufunc, inputs) out_arr = func(np.asarray(inp), out=out, **kwargs) if", "dataset_name is None: dataset_name = 'array_data' f = h5py.File(filename) if", ": YTArray or YTQuantity The unitful quantity to convert from.", "UnitRegistry() powers_dict = self.units.expr.as_powers_dict() units = [] for unit, pow", "def to_astropy(self, **kwargs): \"\"\" Creates a new AstroPy quantity with", "\"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\" Creates a copy of", ": Unit object or string, optional The units you want", "else: if ret_class is YTQuantity: # This happens if you", "\"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rmul__(lo) def __imul__(self,", "YTArrays or single YTArray The array(s) to write to the", "ImportError(\"You don't have AstroPy installed, so you can't convert to", "and write a dataset to dataset_name: string The name of", "for unit, pow in powers_dict.items(): # we have to do", "Pint doesn't recognize # \"yr\" as \"year\" if str(unit).endswith(\"yr\") and", "log: return_without_unit, log2: return_without_unit, log10: return_without_unit, expm1: return_without_unit, log1p: return_without_unit,", "and units to the given units. 
Parameters ---------- units :", "in lut.items() if len(v) == 2]: lut[k] = v +", "in. If not specified, the value will be returned in", "validate_numpy_wrapper_units(v, arrs) return v def ustack(arrs, axis=0): \"\"\"Join a sequence", "= unit_operator(*units) out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs) if unit_operator", "None: info = {} info['units'] = str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut))", "Divide this YTArray by the object on the right of", "@return_arr def dot(self, b, out=None): return super(YTArray, self).dot(b), self.units*b.units def", "or single YTArray The array(s) to write to the file.", "return_without_unit, cos: return_without_unit, tan: return_without_unit, sinh: return_without_unit, cosh: return_without_unit, tanh:", "not any([u1d, u2d]): if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) else:", "return new_array else: return self.to_equivalent(units, equivalence, **kwargs) def to(self, units,", "conversion. If not specified, the default base units of cgs", "is less than the object on the right. \"\"\" #", "the standard library pickle module: http://docs.python.org/2/library/pickle.html Unit metadata is encoded", "to be our class type obj = np.asarray(input_array, dtype=dtype).view(cls) #", "arccosh: return_without_unit, arctanh: return_without_unit, hypot: preserve_units, deg2rad: return_without_unit, rad2deg: return_without_unit,", "to the equivalent mks units. \"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self,", "an ndarray rather than ytarray. 
Returns ------- View of this", "\", \".join(units)) return tuple([YTArray(arr, unit) for arr, unit in zip(arrays,", "self.units def __array_wrap__(self, out_arr, context=None): ret = super(YTArray, self).__array_wrap__(out_arr, context)", "__floordiv__(self, right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__floordiv__(ro) def", "default. Examples -------- >>> a = YTArray([1,2,3], 'cm') >>> myinfo", "\"\"\" oth = validate_comparison_units(self, other, 'less_than or equal') return super(YTArray,", "inp, u = get_inp_u_unary(ufunc, inputs) out_arr = func(np.asarray(inp), out=out, **kwargs)", "input_units : String unit specification, unit symbol object, or astropy", "or str The units you want to convert to. \"\"\"", "in binary_operators: unit_operator = self._ufunc_registry[context[0]] inps, units, ret_class = get_inp_u_binary(ufunc,", "group_name: string An optional group to write the arrays to.", "in_cgs(self): \"\"\" Creates a copy of this array with the", "class YTArray(np.ndarray): \"\"\" An ndarray subclass that attaches a symbolic", "This is not the same as a yt UnitRegistry object.", "group_name in f: g = f[group_name] else: g = f.create_group(group_name)", "= convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs, Rational(exponent))) p_units = \"*\".join(p_units) if", "default, None, results in all columns being read. comments :", "1.10.0. \"\"\" if LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm = np.linalg.norm(data, ord=ord,", "return type(args[0])(ret, units) return wrapped @lru_cache(maxsize=128, typed=False) def sqrt_unit(unit): return", "delimiter=\"\\t\") \"\"\" if not isinstance(arrays, list): arrays = [arrays] units", "and defines # hour as \"h\" if unit_str == \"h\":", "setstate method This is called inside pickle.read() and restores the", "convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert the array and units to the", "in the same dimensions. .. 
note:: All additional keyword arguments", "of this array's data. \"\"\" return self.view(np.ndarray) def to_ndarray(self): \"\"\"", "unit header. footer : str, optional String that will be", "known unit symbol, and check that it is compatible with", "unitful quantity to convert from. unit_registry : Pint UnitRegistry, optional", "% str(context[0])) if unit is None: out_arr = np.array(out_arr, copy=False)", "> 1 out_arr = YTArray(np.asarray(out_arr), unit) else: out_arr = ret_class(np.asarray(out_arr),", "do this because Pint doesn't recognize # \"yr\" as \"year\"", "if offset: np.subtract(self, offset*self.uq, self) return self def convert_to_base(self, unit_system=\"cgs\"):", "not None: g = f[group_name] else: g = f dataset", "this array with the unit information stripped \"\"\" return np.array(self)", "other_object): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # If the", "# This happens if you do ndarray * YTQuantity. Explicitly", "dimensions. if isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret def", "True: obj = np.asarray(input_array, dtype=dtype).view(cls) obj.units = input_units if registry", "other object is not a YTArray, then one of the", "is None and ufunc is not power: unit2 = Unit(registry=getattr(unit1,", "\" \"to YTArray.\" % str(context[0])) if unit is None: out_arr", "the docstrings of that function for details. \"\"\" return self.in_units(units,", "BSD License. # # The full license is in the", "text file. 
Parameters ---------- fname : str The file to", "other, out=self) return self def __pow__(self, power): \"\"\" Raise this", "bypass_validation=False): if dtype is None: dtype = getattr(input_array, 'dtype', np.float64)", "# this needs to be defined for all numpy versions,", "self def convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert the array and units", "unit_registry is None: unit_registry = UnitRegistry() powers_dict = self.units.expr.as_powers_dict() units", "b = YTArray([4, 5, 6], 'm') >>> a + b", "isinstance(x, YTArray): arr = copy.deepcopy(x) arr.convert_to_units(units) return arr if isinstance(x,", "the keyword arguments. The keepdims argument is ignored if the", "same unit information. \"\"\" if _astropy.units is None: raise ImportError(\"You", "\"\"\" oth = sanitize_units_add(self, other, \"subtraction\") np.subtract(self, oth, out=self) return", "while preserving units The axis parameter specifies the index of", "f = h5py.File(filename) if group_name is not None: g =", "with the same # dimensions. if isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units):", "the hdf5 file. dataset_name: string The name of the dataset", "**kwargs): \"\"\" An alias for YTArray.in_units(). See the docstrings of", "pass else: raise YTUnitOperationError(ufunc, unit1, unit2) unit2 = 1.0 return", "# when the pickle format changed if len(lut['m']) == 2:", "string The name of the dataset to create in the", "a sequence of formats. delimiter : str, optional String or", "YTArray to hdf5 file. 
Parameters ---------- filename: string The filename", "_astropy from sympy import Rational from yt.units.unit_lookup_table import \\ default_unit_symbol_lut", "this because Pint doesn't recognize # \"yr\" as \"year\" if", "base, exponent in zip(u.bases, u.powers): unit_str = base.to_string() # we", "array with the unit information stripped \"\"\" return np.array(self) @classmethod", "conj: passthrough_unit, exp: return_without_unit, exp2: return_without_unit, log: return_without_unit, log2: return_without_unit,", "cases here, let the Unit class handle if # it's", "return super(YTArray, self).mean(axis, dtype, out), self.units @return_arr def sum(self, axis=None,", "the default base units of cgs are used. Examples --------", "copy of this array with the data in the supplied", "string, optional The unit system to be used in the", "u2d = units[1].is_dimensionless any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] ==", "defined for YTArray instances\") def get_inp_u_unary(ufunc, inputs, out_arr=None): inp =", "a dataset. \\n\" \"Perhaps you meant to do something like", "the top level by default. \"\"\" import h5py from yt.extern.six.moves", "yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.quan(5, 'code_length') >>> a.in_cgs() 1.543e+25 cm", "except: return False return True def return_arr(func): @wraps(func) def wrapped(*args,", "---------- unit_system : string, optional The unit system to be", "kwargs.pop('out') out = np.asarray(out_orig[0]) else: out = None if len(inputs)", "dictionary of supplementary info to write to append as attributes", "Unit object or string The units you want to get", "def square_unit(unit): return unit*unit @lru_cache(maxsize=128, typed=False) def divide_units(unit1, unit2): return", "the other is a YTArray. 
oth = validate_comparison_units(self, other, 'greater", "cls2): return cls1 if issubclass(cls2, cls1): return cls2 else: raise", "(): return YTQuantity(ret, self.units, bypass_validation=True) else: if hasattr(self, 'units'): ret.units", "\\ YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError from", "# # Start operation methods # if LooseVersion(np.__version__) < LooseVersion('1.13.0'):", "super(YTArray, self).__xor__(right_object) def __rxor__(self, left_object): return super(YTArray, self).__rxor__(left_object) def __ixor__(self,", "this is greater than the object on the right. \"\"\"", ">>> E = YTQuantity(2.5, \"erg/s\") >>> E_new = E.in_base(unit_system=\"galactic\") \"\"\"", "% (cls1, cls2)) def loadtxt(fname, dtype='float', delimiter='\\t', usecols=None, comments='#'): r\"\"\"", "that will be prepended to the ``header`` and ``footer`` strings,", "else: raise RuntimeError( \"Support for the %s ufunc with %i", "unit symbol object, or astropy units The units of the", "= np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims) if norm.shape == (): return", "E_new = E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\" Creates", "# # If I don't do this, super(YTArray, self).__pow__ returns", "'g/cm**3') >>> np.abs(a) 12 g/cm**3 and strip them when it", "adding or subtracting with zero or # array filled with", "less, less_equal, not_equal, equal, logical_and, logical_or, logical_xor, maximum, minimum, fmax,", "{} ret = super(YTArray, self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray):", "conversion methods # def write_hdf5(self, filename, dataset_name=None, info=None, group_name=None): r\"\"\"Writes", "will be used instead of the registry associated with the", "self.has_equivalent(equiv) and oneway_or_equivalent: new_arr = 
this_equiv.convert( self, conv_unit.dimensions, **kwargs) if", "\"\"\"Pickle setstate method This is called inside pickle.read() and restores", "if units[0].dimensions == units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit", "than') return super(YTArray, self).__gt__(oth) # # End comparison operators #", "YTArray or YTQuantity. Parameters ---------- arr : Pint Quantity The", "to read, with 0 being the first. For example, ``usecols", "c = yt.YTArray.from_pint(b) \"\"\" p_units = [] for base, exponent", "values *= conversion_factor if offset: np.subtract(self, offset*self.uq, self) return self", "(I think?) # # If I don't do this, super(YTArray,", "import YTArray >>> a = YTArray([1, 2, 3], 'cm') >>>", "= [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False): units = (units[1],", "inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_comparison_units(inps,", "preserves units. All input arrays must have the same units.", "B) YTArray([ 2., 3.]) cm \"\"\" v = np.intersect1d(arr1, arr2,", "__div__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rtruediv__(lo) def", "if unit_registry is None: unit_registry = UnitRegistry() powers_dict = self.units.expr.as_powers_dict()", "one of the arrays must be # dimensionless or filled", "in kwargs: out_orig = kwargs.pop('out') out = np.asarray(out_orig[0]) else: out", "have identical units.\") v.units = a1.units return v def uconcatenate(arrs,", "but it's not a Unit object. # don't handle all", "conj, exp, exp2, log, log2, log10, expm1, log1p, sqrt, square,", "self).__truediv__(ro) def __rtruediv__(self, left_object): \"\"\" See __div__. \"\"\" lo =", "self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]() oneway_or_equivalent = ( conv_unit.has_equivalent(equiv)", "operator. The unit objects handle being multiplied. 
\"\"\" ro =", "pass elif iterable(input_array) and input_array: if isinstance(input_array[0], YTArray): return YTArray(np.array(input_array,", "= kwargs.pop('out') out = np.asarray(out_orig[0]) else: out = None if", "unit = handle_multiply_divide_units( unit, units, out_arr, out_arr) else: raise RuntimeError(", "axisa=-1, axisb=-1, axisc=-1, axis=None): \"\"\"Applies the cross product to two", "directly. return type(args[0])(ret, units) return wrapped @lru_cache(maxsize=128, typed=False) def sqrt_unit(unit):", "obj_state[:],) new_ret = np_ret[:2] + unit_state + np_ret[3:] return new_ret", "for YTArray.in_units(). See the docstrings of that function for details.", "YTArray instances\") def bitop_units(unit1, unit2): raise TypeError( \"Bit-twiddling operators are", "unit_registry = UnitRegistry() powers_dict = self.units.expr.as_powers_dict() units = [] for", "units = (units[0], units[0]) elif not any([u1d, u2d]): if not", "converters=None, unpack=True, usecols=usecols, ndmin=0) if usecols is not None: units", "\" Units\\n \" + '\\t'.join(units) np.savetxt(fname, np.transpose(arrays), header=header, fmt=fmt, delimiter=delimiter,", "units = handle_preserve_units( inps, units, ufunc, ret_class) unit = unit_operator(*units)", "try: col_words = line.strip().split(delimiter) for word in col_words: float(word) num_cols", "other): \"\"\" See __sub__. 
\"\"\" oth = sanitize_units_add(self, other, \"subtraction\")", "a = np.random.random(10) >>> b = ureg.Quantity(a, \"erg/cm**3\") >>> c", "equivalent, e.g., something that is related by only a constant", "np.abs(a) 12 g/cm**3 and strip them when it would be", "1 return YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr, copy=False), unit) else: #", "or YTQuantity to an equivalent, e.g., something that is related", "return_without_unit, sinh: return_without_unit, cosh: return_without_unit, tanh: return_without_unit, arcsin: return_without_unit, arccos:", "def __setstate__(self, state): \"\"\"Pickle setstate method This is called inside", "__pow__(self, power): \"\"\" Raise this YTArray to some power. Parameters", "self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\" Creates a copy of this array", "cosh: return_without_unit, tanh: return_without_unit, arcsin: return_without_unit, arccos: return_without_unit, arctan: return_without_unit,", "'units', None) if u is None: u = NULL_UNIT if", "of this array with the data in the equivalent mks", "logical_and: comparison_unit, logical_or: comparison_unit, logical_xor: comparison_unit, logical_not: return_without_unit, maximum: preserve_units,", "Multiply this YTArray by the object on the right of", "arcsinh: return_without_unit, arccosh: return_without_unit, arctanh: return_without_unit, hypot: preserve_units, deg2rad: return_without_unit,", "in the same units. Parameters ---------- unit : string The", "circular import from yt.funcs def iterable(obj): try: len(obj) except: return", "module: http://docs.python.org/2/library/pickle.html Unit metadata is encoded in the zeroth element", "units. See the documentation of numpy.concatenate for full details. Examples", "convert a dataset in an hdf5 file into a YTArray.", "isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude, p_units, registry=unit_registry) else: return YTQuantity(arr.magnitude, p_units,", "right of the `+` operator. 
Must check for the correct", "func(np.asarray(inp), out=out, **kwargs) if ufunc in (multiply, divide) and method", "ret.in_units(inp.units) else: # If the other object is not a", "words[1] == \"Units\": next_one = True else: # Here we", "---------- filename: string The filename to create and write a", "(str, bytes)): if input_units.startswith('code_'): raise UnitParseError( \"Code units used without", "we have to do this because Pint doesn't recognize #", "'cm') >>> B = yt.YTArray([2, 3, 4], 'cm') >>> uunion1d(A,", "for a YTArray subclass. \" \"Received operand types (%s) and", "ddof), self.units def __array_wrap__(self, out_arr, context=None): ret = super(YTArray, self).__array_wrap__(out_arr,", "the Unit class handle if # it's a str. units", "raise_error=False): if units[0] != units[1]: u1d = units[0].is_dimensionless u2d =", "np.stack that preserves units. \"\"\" v = np.stack(arrs) v =", "super(YTArray, self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\" A scalar", "divide_units, logaddexp: return_without_unit, logaddexp2: return_without_unit, true_divide: divide_units, floor_divide: divide_units, negative:", "it can't be None. return False oth = validate_comparison_units(self, other,", "optional A single format (%10.5f), or a sequence of formats.", "unit1 @lru_cache(maxsize=128, typed=False) def power_unit(unit, power): return unit**power @lru_cache(maxsize=128, typed=False)", "the ``header`` and ``footer`` strings, to mark them as comments.", "if this is greater than the object on the right.", "arrays : list of YTArrays or single YTArray The array(s)", "that preserves units. 
See the documentation for that function for", "d.attrs[k] else: del f[dataset_name] d = g.create_dataset(dataset_name, data=self) else: d", "unit data from the metadata extracted in __reduce__ and then", "get_binary_op_return_class(type(inp1), type(inp2)) if unit1 is None: unit1 = Unit(registry=getattr(unit2, 'registry',", "return str(self) def validate_numpy_wrapper_units(v, arrs): if not any(isinstance(a, YTArray) for", "new_ret def __setstate__(self, state): \"\"\"Pickle setstate method This is called", "new_array else: return self.to_equivalent(units, equivalence, **kwargs) def to(self, units, equivalence=None,", "if unit1 is None: unit1 = Unit(registry=getattr(unit2, 'registry', None)) if", "to a YTArray or YTQuantity. Parameters ---------- arr : AstroPy", "by e.g. ``yt.loadtxt``. Examples -------- >>> sp = ds.sphere(\"c\", (100,\"kpc\"))", "wrapped @lru_cache(maxsize=128, typed=False) def sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128, typed=False) def", "arccos, arctan, arctan2, \\ hypot, sinh, cosh, tanh, arcsinh, arccosh,", "right_shift: bitop_units, greater: comparison_unit, greater_equal: comparison_unit, less: comparison_unit, less_equal: comparison_unit,", "arrays = np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter, converters=None, unpack=True, usecols=usecols, ndmin=0)", "string The filename to create and write a dataset to", "group_name: string An optional group to read the arrays from.", "Creates a copy of this array with the data in", "terms of the Modified BSD License. # # The full", "type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units, unit) else: return", "descriptions of the keyword arguments. 
The keepdims argument is ignored", "The axis parameter specifies the index of the new axis", "numpy as np from distutils.version import LooseVersion from functools import", "\"\"\" from yt.utilities.on_demand_imports import _h5py as h5py from yt.extern.six.moves import", "return self def __truediv__(self, right_object): ro = sanitize_units_mul(self, right_object) return", "issubclass(cls2, (numeric_type, np.number, list, tuple)): return cls1 if issubclass(cls1, YTQuantity):", "wraps from numpy import \\ add, subtract, multiply, divide, logaddexp,", "return True oth = validate_comparison_units(self, other, 'not equal') return super(YTArray,", "== self.shape and d.dtype == self.dtype: d[...] = self for", "oth = sanitize_units_mul(self, other) np.divide(self, oth, out=self) return self def", "and ufunc in trigonometric_operators: inp = inp.in_units('radian').v if out_arr is", "same dimensions. .. note:: All additional keyword arguments are passed", "\"\"\" return self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\" Returns a view into", "ufuncs will pass through units where appropriate. >>> import numpy", "return YTQuantity(dot, units) return YTArray(dot, units) def uvstack(arrs): \"\"\"Stack arrays", "np.stack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def array_like_field(data, x,", "super(YTArray, self).__setstate__(state[1:]) try: unit, lut = state[0] except TypeError: #", "level by default. Examples -------- >>> a = YTArray([1,2,3], 'cm')", "__pos__(self): \"\"\" Posify the data. \"\"\" # this needs to", "super(YTArray, self).__pow__(power) return type(self)(ret, input_units='') return super(YTArray, self).__pow__(power) def __abs__(self):", "a Pint Quantity. Parameters ---------- arr : YTArray or YTQuantity", "same # dimensions. if isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return", "self.dtype: d[...] 
= self for k in d.attrs.keys(): del d.attrs[k]", "unit2 is None and ufunc is not power: unit2 =", "not None: if group_name in f: g = f[group_name] else:", "silly and defines # hour as \"h\" if unit_str ==", "from numpy import positive, divmod as divmod_, isnat, heaviside except", "cgs units. \"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\" Convert the", "the equivalent mks units. \"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units,", "same units. See the documentation of numpy.concatenate for full details.", "in the specified unit system. Parameters ---------- unit_system : string,", "NumPy array \"\"\" if units is None: v = self.value", "the arrays are datasets at the top level by default.", "is already associated with a unit registry and this is", "The keepdims argument is ignored if the version of numpy", "\"\"\" # let Unit() handle units arg if it's not", "is encoded in the zeroth element of third element of", "dtype of the array data. 
Defaults to the dtype of", "f: g = f[group_name] else: g = f.create_group(group_name) else: g", "g = f dataset = g[dataset_name] data = dataset[:] units", "\"\"\" Lists the possible equivalencies associated with this YTArray or", "fmin: preserve_units, isreal: return_without_unit, iscomplex: return_without_unit, isfinite: return_without_unit, isinf: return_without_unit,", "creating a YTQuantity with # size > 1 out_arr =", "super(YTArray, self).sum(axis, dtype, out), self.units @return_arr def std(self, axis=None, dtype=None,", "Unit object or string, optional The units you want to", "less_equal: comparison_unit, not_equal: comparison_unit, equal: comparison_unit, logical_and: comparison_unit, logical_or: comparison_unit,", "# self is a YTArray, so it can't be None.", "self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def from_pint(cls, arr, unit_registry=None): \"\"\" Convert a", "self).__ge__(oth) def __gt__(self, other): \"\"\" Test if this is greater", "`*` operator. The unit objects handle being multiplied. \"\"\" ro", "= unit if out_arr.size == 1: return YTQuantity(np.array(out_arr), unit) else:", "line of numbers try: col_words = line.strip().split(delimiter) for word in", "docstrings of that function for details. \"\"\" return self.in_units(units, equivalence=equivalence,", "from .pint_conversions import convert_pint_units NULL_UNIT = Unit() POWER_SIGN_MAPPING = {multiply:", "wrapper around np.vstack that preserves units. \"\"\" v = np.vstack(arrs)", "subclass, so don't call YTArray directly. 
return type(args[0])(ret, units) return", "if not all(a.units == a1.units for a in arrs[1:]): raise", "= {} ret = super(YTArray, self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units)) class", "exp2, log, log2, log10, expm1, log1p, sqrt, square, reciprocal, sin,", "-1: ret = super(YTArray, self).__pow__(power) return type(self)(ret, input_units='') return super(YTArray,", "values must be numeric\") ret = YTArray.__new__(cls, input_scalar, input_units, registry,", "[] for base, exponent in arr._units.items(): bs = convert_pint_units(base) p_units.append(\"%s**(%s)\"", "(conversion_factor, offset) = self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview * conversion_factor, new_units)", "of adding or subtracting with zero or # array filled", "not cm^3). registry : A UnitRegistry object The registry to", "right. \"\"\" # Check that other is a YTArray. if", "if unit_operator in (multiply_units, divide_units): out, out_arr, unit = handle_multiply_divide_units(", "return out, out_arr, unit def coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray): return", "return super(YTArray, self).__lt__(oth) def __le__(self, other): \"\"\"Test if this is", "'cm') >>> B = yt.YTArray([2, 3, 4], 'cm') >>> uintersect1d(A,", "from yt.units.unit_lookup_table import \\ default_unit_symbol_lut from yt.units.equivalencies import equivalence_registry from", "# # Start convenience methods # @property def value(self): \"\"\"Get", "the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- import copy", "default. 
\"\"\" import h5py from yt.extern.six.moves import cPickle as pickle", "\\ dimensionless, \\ em_dimensions from yt.utilities.exceptions import \\ YTUnitOperationError, YTUnitConversionError,", "unit_registry=None): \"\"\" Convert a YTArray or YTQuantity to a Pint", "= sanitize_units_mul(self, left_object) return super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self, other): \"\"\"", "it in that system's base units. Parameters ---------- unit_system :", "= super(YTArray, self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\" A", "1, 2, 3]) g/cm**3 and strip them when it would", "== 1: out_arr = YTQuantity(np.asarray(out_arr), unit) else: if ret_class is", "modf, frexp, fabs, spacing, positive, isnat, ) binary_operators = (", "field = data._determine_fields(field)[0] if isinstance(field, tuple): finfo = data.ds._get_field_info(field[0],field[1]) else:", "a YTArray or YTQuantity. Parameters ---------- arr : Pint Quantity", "and kwargs['axis'] is not None: unit = u**(power_sign*inp.shape[kwargs['axis']]) else: unit", "type(self)(super(YTArray, self).__pos__(), self.units) @return_arr def dot(self, b, out=None): return super(YTArray,", "arr2]) return v def uunion1d(arr1, arr2): \"\"\"Find the union of", "def to_value(self, units=None, equivalence=None, **kwargs): \"\"\" Creates a copy of", "instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\" % (input_array, input_units) ) if isinstance(input_array,", "\"\"\" Negate the data. \"\"\" return super(YTArray, self).__neg__() def __mul__(self,", "return self.to_equivalent(units, equivalence, **kwargs) def to(self, units, equivalence=None, **kwargs): \"\"\"", "instead of the registry associated with the unit object. dtype", "return super(YTArray, self).__neg__() def __mul__(self, right_object): \"\"\" Multiply this YTArray", "== 0: continue if line[0] == comments: if next_one: units", "the object on the right. 
\"\"\" oth = validate_comparison_units(self, other,", "equal') return super(YTArray, self).__ne__(oth) def __ge__(self, other): \"\"\" Test if", "is not None: unit = u**(power_sign*inp.shape[kwargs['axis']]) else: unit = u**(power_sign*inp.size)", "return_without_unit, heaviside: preserve_units, } __array_priority__ = 2.0 def __new__(cls, input_array,", "'less_than or equal') return super(YTArray, self).__le__(oth) def __eq__(self, other): \"\"\"", "\\ YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache import lru_cache from numbers import", "file. Parameters ---------- filename: string The filename to create and", "axisc=axisc, axis=axis) units = arr1.units * arr2.units arr = YTArray(v,", "LooseVersion('1.13.0'): def __add__(self, right_object): \"\"\" Add this ytarray to the", ">>> a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm This", "# Begin reduction operators # @return_arr def prod(self, axis=None, dtype=None,", "import numpy as np >>> a = YTArray(np.arange(8) - 4,", "be a valid unit object. Defaults to False. Examples --------", "YTUnitOperationError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units", "bitop_units, right_shift: bitop_units, greater: comparison_unit, greater_equal: comparison_unit, less: comparison_unit, less_equal:", "def __radd__(self, left_object): \"\"\" See __add__. \"\"\" lo = sanitize_units_add(self,", "registry=registry) return ret elif isinstance(input_array, np.ndarray): pass elif iterable(input_array) and", "return super(YTArray, self).__add__(ro) def __radd__(self, left_object): \"\"\" See __add__. 
\"\"\"", "instances\") def bitop_units(unit1, unit2): raise TypeError( \"Bit-twiddling operators are not", "axis=None, dtype=None, out=None): return super(YTArray, self).sum(axis, dtype, out), self.units @return_arr", ">>> from yt import YTArray >>> a = YTArray([1, 2,", "inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_multiply_divide_units(unit,", "__ior__(self, other): np.bitwise_or(self, other, out=self) return self def __xor__(self, right_object):", "ufunc = context[0] inputs = context[1] if ufunc in unary_operators:", "is found, uses np.float64 bypass_validation : boolean If True, all", "v = np.concatenate(arrs, axis=axis) v = validate_numpy_wrapper_units(v, arrs) return v", "\"\"\" v = np.concatenate(arrs, axis=axis) v = validate_numpy_wrapper_units(v, arrs) return", "(numeric_type, np.number, list, tuple)): return cls1 if issubclass(cls1, YTQuantity): return", "== other.units.base_value: return other if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units,", "divide: -1} # redefine this here to avoid a circular", "base) if not my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units, my_units.dimensions, other_units, other_units.dimensions)", "% (input_array, input_units) ) if isinstance(input_array, YTArray): ret = input_array.view(cls)", "produce corrupted, invalid units or array data, but can lead", "units[0] != units[1]: any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] ==", "str, optional String or character separating columns. header : str,", "all(a.units == a1.units for a in arrs[1:]): raise RuntimeError(\"Your arrays", "\"\"\" v = np.intersect1d(arr1, arr2, assume_unique=assume_unique) v = validate_numpy_wrapper_units(v, [arr1,", "bypass_validation=True) else: if hasattr(self, 'units'): ret.units = self.units return ret", "tuple, list, or array to attach units to input_units :", "other is a YTArray. 
if other is None: return True", "bare quantity in. If not specified, the value will be", "out=self) return self def __or__(self, right_object): return super(YTArray, self).__or__(right_object) def", "isreal: return_without_unit, iscomplex: return_without_unit, isfinite: return_without_unit, isinf: return_without_unit, isnan: return_without_unit,", "data. \"\"\" return super(YTArray, self).__neg__() def __mul__(self, right_object): \"\"\" Multiply", "get_inp_u_unary(ufunc, inputs, out_arr=None): inp = inputs[0] u = getattr(inp, 'units',", "YTArray directly. return type(args[0])(ret, units) return wrapped @lru_cache(maxsize=128, typed=False) def", "\"\"\" v = np.union1d(arr1, arr2) v = validate_numpy_wrapper_units(v, [arr1, arr2])", "-inf, 0. , 0.30103 , 0.47712125, 0.60205999, 0.69897 , 0.77815125,", "value. \"\"\" if isinstance(power, YTArray): if not power.units.is_dimensionless: raise YTUnitOperationError('power',", "data.ds.quan(x, units) def get_binary_op_return_class(cls1, cls2): if cls1 is cls2: return", "from numbers import Number as numeric_type from yt.utilities.on_demand_imports import _astropy", "convert from. unit_registry : Pint UnitRegistry, optional The Pint UnitRegistry", "serialized the unit symbol lookup table # into the pickle", "def copy(self, order='C'): return type(self)(np.copy(np.asarray(self)), self.units) def __array_finalize__(self, obj): if", "meant to do something like this instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\"", "v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v def unorm(data, ord=None,", "np.subtract(self, oth, out=self) return self def __neg__(self): \"\"\" Negate the", "it's a str. 
units = Unit(input_units, registry=registry) # Attach the", "units = Unit() elif isinstance(input_units, Unit): if registry and registry", "'m') >>> a + b 201.0 cm >>> b +", "isinstance(field, tuple): finfo = data.ds._get_field_info(field[0],field[1]) else: finfo = data.ds._get_field_info(field) if", "dtype=None, out=None): if axis is not None: units = self.units**self.shape[axis]", "obj_state = np_ret[2] unit_state = (((str(self.units), self.units.registry.lut),) + obj_state[:],) new_ret", "\"\"\" Raise this YTArray to some power. Parameters ---------- power", "the arrays from. If not specified, the arrays are datasets", "unit_registry.Quantity(self.value, units) # # End unit conversion methods # def", "ret elif isinstance(input_array, np.ndarray): pass elif iterable(input_array) and input_array: if", "ret_class = get_binary_op_return_class(type(inp1), type(inp2)) if unit1 is None: unit1 =", "in out_arr)) elif out_arr.size == 1: out_arr = YTQuantity(np.asarray(out_arr), unit)", "isinstance(input_units, (str, bytes)): if input_units.startswith('code_'): raise UnitParseError( \"Code units used", "np.any(ret): raise YTUnitOperationError(op_string, inp.units, dimensionless) return ret def validate_comparison_units(this, other,", "p_units = \"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude, p_units, registry=unit_registry)", "o in input_object]): ff = getattr(input_object[0], 'units', NULL_UNIT, ) if", "equivalencies are supported for this unitful quantity, try the :meth:`list_equivalencies`", "# don't handle all the cases here, let the Unit", "isinstance(other_units, Unit): other_units = Unit(other_units, registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions, None)", "have to do this because AstroPy is silly and defines", "conversion methods # def convert_to_units(self, units): \"\"\" Convert the array", "numpy import \\ add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide,", "raise 
RuntimeError( \"Support for the %s ufunc has not been", "except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units, unit) else: return new_arr.in_units(unit) else:", "isinstance(arrays, list): arrays = [arrays] units = [] for array", "with them. >>> np.log10(a) array([ -inf, 0. , 0.30103 ,", "returns it in that system's base units. Parameters ---------- unit_system", "True \"\"\" _ufunc_registry = { add: preserve_units, subtract: preserve_units, multiply:", "datasets at the top level by default. Examples -------- >>>", "of this array with the data in the supplied units,", "h5py.File(filename) if group_name is not None: if group_name in f:", "arcsin, arccos, arctan, arctan2, \\ hypot, sinh, cosh, tanh, arcsinh,", "parameter specifies the index of the new axis in the", "an already formed ndarray instance # We first cast to", "be used. NOTE: This is not the same as a", "bitwise_and, bitwise_or, bitwise_xor, left_shift, right_shift, greater, greater_equal, less, less_equal, not_equal,", "self).__lt__(oth) def __le__(self, other): \"\"\"Test if this is less than", "so you can't convert to \" + \"an AstroPy quantity.\")", "else: units = self.units**self.size return super(YTArray, self).prod(axis, dtype, out), units", "other): \"\"\" See __add__. \"\"\" oth = sanitize_units_add(self, other, \"addition\")", "else: return YTQuantity(arr.value, ap_units, registry=unit_registry) def to_astropy(self, **kwargs): \"\"\" Creates", "right_object): \"\"\" Add this ytarray to the object on the", "from. 
unit_registry : yt UnitRegistry, optional A yt unit registry", "registry=unit_registry) else: return YTQuantity(arr.magnitude, p_units, registry=unit_registry) def to_pint(self, unit_registry=None): \"\"\"", "not defined for YTArray instances\") def bitop_units(unit1, unit2): raise TypeError(", "else: return v def in_base(self, unit_system=\"cgs\"): \"\"\" Creates a copy", "\"\"\" if isinstance(power, YTArray): if not power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit)", "saved prior to PR #1728 # when the pickle format", "default one will be used. NOTE: This is not the", "201.0 cm >>> b + a 2.01 m NumPy ufuncs", "YT arrays. This wrapper around numpy.cross preserves units. See the", "name of the dataset to create in the file. info:", "data.ds.arr(x, units) else: return data.ds.quan(x, units) def get_binary_op_return_class(cls1, cls2): if", "and strip them when it would be annoying to deal", "# def convert_to_units(self, units): \"\"\" Convert the array and units", "a dataset to dataset_name: string The name of the dataset", "wrapped(*args, **kwargs): ret, units = func(*args, **kwargs) if ret.shape ==", ": str or sequence of strs, optional A single format", "that preserves units. \"\"\" dot = np.dot(op1.d, op2.d) units =", "ret ufunc = context[0] inputs = context[1] if ufunc in", "units. \"\"\" v = np.stack(arrs) v = validate_numpy_wrapper_units(v, arrs) return", "the 2nd, 5th and 6th columns. 
The default, None, results", "or issubclass(cls1, (numeric_type, np.number, list, tuple)): return cls2 if cls2", "floor, ceil, trunc, fabs, spacing try: # numpy 1.13 or", "in col_words: float(word) num_cols = len(col_words) break except ValueError: mylog.warning(\"Unrecognized", "Defaults to the dtype of the input data, or, if", "base = \"SI\" else: base = \"CGS\" raise YTEquivalentDimsError(my_units, other_units,", "invert_units, left_shift: bitop_units, right_shift: bitop_units, greater: comparison_unit, greater_equal: comparison_unit, less:", "in the zeroth element of third element of the returned", "np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is None: dataset_name = 'array_data' f =", "it. Returns ------- Quantity object with data converted to cgs", "comments : str, optional String that will be prepended to", "in (multiply_units, divide_units): out_arr, out_arr, unit = handle_multiply_divide_units( unit, units,", "logical_and, logical_or, logical_xor, maximum, minimum, fmax, fmin, copysign, nextafter, ldexp,", "text file. Each row in the text file must have", "a YTQuantity with # size > 1 out_arr = YTArray(np.asarray(out_arr),", "for this unitful quantity, try the :meth:`list_equivalencies` method. Default: None", "def unit_quantity(self): \"\"\"Get a YTQuantity with the same unit as", "isnan, signbit, floor, ceil, trunc, modf, frexp, fabs, spacing, positive,", "if d.shape == self.shape and d.dtype == self.dtype: d[...] =", "exp, exp2, log, log2, log10, expm1, log1p, sqrt, square, reciprocal,", "file # created before we serialized the unit symbol lookup", "units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) else: if raise_error: raise YTUfuncUnitError(ufunc, *units)", "if this is greater than or equal to other. \"\"\"", "array_like_field(data, x, field): field = data._determine_fields(field)[0] if isinstance(field, tuple): finfo", "self).__rtruediv__(lo) def __itruediv__(self, other): \"\"\" See __div__. 
\"\"\" oth =", "if not power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit) # Work around a", "units arg if it's not already a Unit obj. if", "method. Default: None Returns ------- NumPy array \"\"\" if units", "out, out_arr, unit = handle_multiply_divide_units( unit, units, out, out_arr) else:", "\"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray,", "inputs has not been\" \"added to YTArray.\" % (str(ufunc), len(inputs)))", "inp, u = get_inp_u_unary(ufunc, inputs, out_arr) unit = self._ufunc_registry[context[0]](u) ret_class", "return super(YTArray, self).__le__(oth) def __eq__(self, other): \"\"\" Test if this", "object to the array data. Parameters ---------- input_array : :obj:`!iterable`", "r\"\"\"Attempts read in and convert a dataset in an hdf5", ": Pint UnitRegistry, optional The Pint UnitRegistry to use in", "in equiv_dims.free_symbols: base = \"SI\" else: base = \"CGS\" raise", "or filled with zeros if not inp.units.is_dimensionless and np.any(ret): raise", "you wish to convert to. equiv : string The equivalence", "\"\"\" Convert the array and units to the given units.", "this array and a value of 1.0\"\"\" return YTQuantity(1.0, self.units)", "right. \"\"\" oth = validate_comparison_units(self, other, 'less_than or equal') return", "units) def uvstack(arrs): \"\"\"Stack arrays in sequence vertically (row wise)", "unit_state + np_ret[3:] return new_ret def __setstate__(self, state): \"\"\"Pickle setstate", "# @property def value(self): \"\"\"Get a copy of the array", "unit information. \"\"\" if _astropy.units is None: raise ImportError(\"You don't", "array data. 
Parameters ---------- input_array : :obj:`!iterable` A tuple, list,", "input_units=None, registry=None, dtype=np.float64, bypass_validation=False): if not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)):", "lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rdiv__(lo) def __idiv__(self, other):", "uunion1d(A, B) YTArray([ 1., 2., 3., 4.]) cm \"\"\" v", "a YTQuantity with the same unit as this array and", "arrays: if hasattr(array, \"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\") if header !=", "def __mul__(self, right_object): \"\"\" Multiply this YTArray by the object", "= handle_comparison_units( inps, units, ufunc, ret_class, raise_error=True) unit = unit_operator(*units)", "Check that the other is a YTArray. oth = validate_comparison_units(", "other_units): \"\"\" Takes a Unit object, or string of known", "used in the conversion. If not specified, the default base", "other.units.expr: if this.units.base_value == other.units.base_value: return other if not this.units.same_dimensions_as(other.units):", "See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.true_divide(self, oth, out=self)", ">>> np.all(a == b) True \"\"\" def __new__(cls, input_scalar, input_units=None,", "\"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.true_divide(self, oth,", "if this is less than or equal to the object", "arccosh, arctanh, deg2rad, rad2deg, \\ bitwise_and, bitwise_or, bitwise_xor, invert, left_shift,", "sign, conj, exp, exp2, log, log2, log10, expm1, log1p, sqrt,", "spacing, positive, isnat, ) binary_operators = ( add, subtract, multiply,", "if not inp.units.is_dimensionless and np.any(ret): raise YTUnitOperationError(op_string, inp.units, dimensionless) return", "left_object): \"\"\" See __sub__. 
\"\"\" lo = sanitize_units_add(self, left_object, \"subtraction\")", "unit and shape as this array\"\"\" return np.ones_like(self) ua =", "return_without_unit, arccos: return_without_unit, arctan: return_without_unit, arctan2: arctan2_unit, arcsinh: return_without_unit, arccosh:", "default one will be used. Examples -------- >>> from pint", "self def __sub__(self, right_object): \"\"\" Subtract the object on the", "of your arrays are YTArrays.\") a1 = arrs[0] if not", "handle units arg if it's not already a Unit obj.", "documentation for the standard library pickle module: http://docs.python.org/2/library/pickle.html Unit metadata", "__repr__(self): \"\"\" \"\"\" return super(YTArray, self).__repr__()+' '+self.units.__repr__() def __str__(self): \"\"\"", "import numpy as np from distutils.version import LooseVersion from functools", "``footer`` strings, to mark them as comments. Default: '# ',", "The file to write the YTArrays to. arrays : list", "integrated with yt datasets: >>> import yt >>> ds =", "array. Optionally, an equivalence can be specified to convert to", "string of known unit symbol, and check that it is", "super(YTArray, self).__abs__() # # Start comparison operators. # def __lt__(self,", "yt UnitRegistry, optional A yt unit registry to use in", "cross product to two YT arrays. This wrapper around numpy.cross", "savetxt(fname, arrays, fmt='%.18e', delimiter='\\t', header='', footer='', comments='#'): r\"\"\" Write YTArrays", "\"\"\" if LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm = np.linalg.norm(data, ord=ord, axis=axis)", "base units in the specified unit system. Parameters ---------- unit_system", "(cm**3, not cm^3). registry : A UnitRegistry object The registry", "and units to the equivalent mks units. 
\"\"\" return self.convert_to_units(self.units.get_mks_equivalent())", "units.\") v.units = a1.units return v def uconcatenate(arrs, axis=0): \"\"\"Concatenate", "= sanitize_units_mul(self, left_object) return super(YTArray, self).__rtruediv__(lo) def __itruediv__(self, other): \"\"\"", "memodict is None: memodict = {} ret = super(YTArray, self).__deepcopy__(memodict)", "self).__radd__(lo) def __iadd__(self, other): \"\"\" See __add__. \"\"\" oth =", "4.01, 5.02, 6.03]) m NumPy ufuncs will pass through units", "Overwrite without deleting if we can get away with it.", "len(str(unit)) in [2,3]: unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit, Rational(pow)))", "YTArray. Parameters ---------- filename: string The filename to of the", "the same units. Parameters ---------- unit : string The unit", "new_array = type(self)(self.ndview * conversion_factor, new_units) if offset: np.subtract(new_array, offset*new_array.uq,", "in the text file must have the same number of", "sequence horizontally (column wise) while preserving units This is a", "Unit(registry=getattr(unit1, 'registry', None)) elif ufunc is power: unit2 = inp2", "units This is a wrapper around np.vstack that preserves units.", "redefine this here to avoid a circular import from yt.funcs", "syntax (cm**3, not cm^3). registry : A UnitRegistry object The", "unit attribute set to the sympy expression 1/1 rather than", "not np.any(this_object): return ret raise YTUnitOperationError(op_string, inp.units, ret.units) ret =", "Unit object. \"\"\" # let Unit() handle units arg if", "with the data in the supplied units, and returns it.", "the version of numpy installed is older than numpy 1.10.0.", "the unit header. footer : str, optional String that will", "r\"\"\" Load YTArrays with unit information from a text file.", "and shape as this array\"\"\" return np.ones_like(self) ua = unit_array", "and units to the equivalent cgs units. 
\"\"\" return self.convert_to_units(self.units.get_cgs_equivalent())", "uconcatenate(arrs, axis=0): \"\"\"Concatenate a sequence of arrays. This wrapper around", "super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self, other): \"\"\" See __div__. \"\"\" oth", "arcsinh, arccosh, arctanh, deg2rad, rad2deg, invert, logical_not, isreal, iscomplex, isfinite,", "input_array.view(cls) if registry is None and isinstance(input_units, (str, bytes)): if", "f[group_name] else: g = f dataset = g[dataset_name] data =", "out_arr) unit = self._ufunc_registry[context[0]](u) ret_class = type(self) elif ufunc in", "in arrs): raise RuntimeError(\"Not all of your arrays are YTArrays.\")", "already formed ndarray instance # We first cast to be", "units = func(*args, **kwargs) if ret.shape == (): return YTQuantity(ret,", "handle_comparison_units(inps, units, ufunc, ret_class, raise_error=False): if units[0] != units[1]: u1d", "other, 'not equal') return super(YTArray, self).__ne__(oth) def __ge__(self, other): \"\"\"", "is None: unit1 = Unit(registry=getattr(unit2, 'registry', None)) if unit2 is", "passthrough_unit, ceil: passthrough_unit, trunc: passthrough_unit, spacing: passthrough_unit, positive: passthrough_unit, divmod_:", "input validation is skipped. Using this option may produce corrupted,", "unitful quantity, try the :meth:`list_equivalencies` method. Examples -------- >>> a", "= validate_numpy_wrapper_units(v, arrs) return v def array_like_field(data, x, field): field", ": Unit object or string The units you want to", "units = \"*\".join(units) return unit_registry.Quantity(self.value, units) # # End unit", "inputs = context[1] if ufunc in unary_operators: out_arr, inp, u", "If one is not supplied, the default one will be", "The full license is in the file COPYING.txt, distributed with", "next_one = True else: # Here we catch the first", "full details. 
\"\"\" v = np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc,", "return out_arr out_arr.units = unit if out_arr.size == 1: return", "isnat, heaviside except ImportError: positive, divmod_, isnat, heaviside = (None,)*4", "# numpy issue #9081 return type(self)(super(YTArray, self).__pos__(), self.units) @return_arr def", "else: # Here we catch the first line of numbers", "import \\ YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError", "axis=None): \"\"\"Applies the cross product to two YT arrays. This", "YTArray(np.asarray(out_arr), unit) else: out_arr = ret_class(np.asarray(out_arr), unit) if out is", "None f.close() registry = UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data, units, registry=registry)", "if next_one: units = words[1:] if len(words) == 2 and", "out_arr.units = unit if out_arr.size == 1: return YTQuantity(np.array(out_arr), unit)", "if isinstance(unit2, np.ndarray): if isinstance(unit2, YTArray): if unit2.units.is_dimensionless: pass else:", "del d.attrs[k] else: del f[dataset_name] d = g.create_dataset(dataset_name, data=self) else:", "= handle_multiply_divide_units( unit, units, out, out_arr) else: raise RuntimeError( \"Support", "which should be used if that particular equivalency requires them.", "np.true_divide(self, oth, out=self) return self def __floordiv__(self, right_object): ro =", "== np.bool_(False): units = (units[1], units[1]) elif any_nonzero[1] == np.bool_(False):", "em_dimensions from yt.utilities.exceptions import \\ YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError,", "return YTQuantity(ret, self.units, bypass_validation=True) else: if hasattr(self, 'units'): ret.units =", "= validate_comparison_units(self, other, 'greater than') return super(YTArray, self).__gt__(oth) # #", "before the unit header. 
footer : str, optional String that", "YTArray): if unit2.units.is_dimensionless: pass else: raise YTUnitOperationError(ufunc, unit1, unit2) unit2", "license is in the file COPYING.txt, distributed with this software.", "if isinstance(input_array[0], YTArray): return YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry) # Input", "bitop_units, bitwise_xor: bitop_units, invert: invert_units, left_shift: bitop_units, right_shift: bitop_units, greater:", "self, conv_unit.dimensions, **kwargs) if isinstance(new_arr, tuple): try: return type(self)(new_arr[0], new_arr[1]).in_units(unit)", "in (multiply_units, divide_units): out, out_arr, unit = handle_multiply_divide_units( unit, units,", "str, optional The string used to separate values. By default,", "isinf: return_without_unit, isnan: return_without_unit, signbit: return_without_unit, copysign: passthrough_unit, nextafter: preserve_units,", "cos: return_without_unit, tan: return_without_unit, sinh: return_without_unit, cosh: return_without_unit, tanh: return_without_unit,", "yt.YTArray([1, 2, 3], 'cm') >>> B = yt.YTArray([2, 3, 4],", "first cast to be our class type obj = np.asarray(input_array,", "__or__(self, right_object): return super(YTArray, self).__or__(right_object) def __ror__(self, left_object): return super(YTArray,", "inps, units = handle_comparison_units( inps, units, ufunc, ret_class) elif unit_operator", "to PR #1728 # when the pickle format changed if", "**kwargs): ret, units = func(*args, **kwargs) if ret.shape == ():", "ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in (comparison_unit, arctan2_unit): inps,", "we use the `units` # attribute. 
if isinstance(ret, YTArray): if", "---------- arr : AstroPy Quantity The Quantity to convert from.", "-------- >>> from pint import UnitRegistry >>> import numpy as", "[\"dimensionless\"]*num_cols arrays = np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter, converters=None, unpack=True, usecols=usecols,", "@property def unit_quantity(self): \"\"\"Get a YTQuantity with the same unit", "\"\"\" Convert the array and units to the equivalent cgs", "conv_unit.has_equivalent(equiv) or this_equiv._one_way) if self.has_equivalent(equiv) and oneway_or_equivalent: new_arr = this_equiv.convert(", "ValueError: mylog.warning(\"Unrecognized character at beginning of line: \\\"%s\\\".\" % line[0])", "If the other object is a YTArray and has the", "1., 2., 3., 2., 3., 4.]) cm \"\"\" v =", "last dimension. This is a wrapper around np.stack that preserves", "unit, units, out, out_arr) else: raise RuntimeError( \"Support for the", "data as a numpy ndarray\"\"\" return np.array(self) v = value", ">>> uintersect1d(A, B) YTArray([ 2., 3.]) cm \"\"\" v =", "__eq__(self, other): \"\"\" Test if this is equal to the", "= np.stack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def array_like_field(data,", "= \"*\".join(units) return unit_registry.Quantity(self.value, units) # # End unit conversion", "and power == -1: ret = super(YTArray, self).__pow__(power) return type(self)(ret,", "a tuple used to restore the state of the ndarray.", "system, and returns it in that system's base units. Parameters", "print(np.log10(a)) 1.07918124605 YTQuantity is tightly integrated with yt datasets: >>>", "the data in the iterable. return YTArray(input_object) return input_object else:", "the conversion. If one is not supplied, the default one", "footer : str, optional String that will be written at", "and ``footer`` strings, to mark them as comments. 
Default: '#", "if this is not equal to the object on the", "return unit**0.5 @lru_cache(maxsize=128, typed=False) def multiply_units(unit1, unit2): return unit1 *", "(None,)*4 from yt.units.unit_object import Unit, UnitParseError from yt.units.unit_registry import UnitRegistry", "axis while preserving units The axis parameter specifies the index", "tuple, itself a tuple used to restore the state of", "#9081 return type(self)(super(YTArray, self).__pos__(), self.units) @return_arr def dot(self, b, out=None):", "super(YTArray, self).__lt__(oth) def __le__(self, other): \"\"\"Test if this is less", "reciprocal_unit(unit): return unit**-1 def passthrough_unit(unit, unit2=None): return unit def return_without_unit(unit,", "registry is None: ret.units = input_array.units else: units = Unit(str(input_array.units),", "self).__neg__() def __mul__(self, right_object): \"\"\" Multiply this YTArray by the", "not input_units.registry: units = Unit(str(input_units), registry=registry) else: units = input_units", "to. \"\"\" new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units)", "units, ufunc, ret_class, raise_error=False): if units[0] != units[1]: u1d =", "greater_equal, less, less_equal, not_equal, equal, logical_and, \\ logical_or, logical_xor, logical_not,", "\"thermal\") \"\"\" conv_unit = Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit)", "= state[0] except TypeError: # this case happens when we", "#1728 # when the pickle format changed if len(lut['m']) ==", "arcsin, arccos, arctan, sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad,", "equivalence : string, optional The equivalence you wish to use.", "isinstance(input_scalar, (numeric_type, np.number, np.ndarray)): raise RuntimeError(\"YTQuantity values must be numeric\")", "the array and units to the equivalent mks units. 
\"\"\"", "and unit.base_value != 1.0: if not units[0].is_dimensionless: if units[0].dimensions ==", "End comparison operators # # # Begin reduction operators #", "\\ logical_or, logical_xor, logical_not, maximum, minimum, fmax, fmin, \\ isreal,", "a YTArray. oth = validate_comparison_units( self, other, 'greater than or", "super(YTArray, self).__rxor__(left_object) def __ixor__(self, other): np.bitwise_xor(self, other, out=self) return self", "that will be written at the beginning of the file,", "be defined for all numpy versions, see # numpy issue", "unit registry to use in the conversion. If one is", "= ndview @property def unit_quantity(self): \"\"\"Get a YTQuantity with the", "NotImplemented: return input_array.view(cls) if registry is None and isinstance(input_units, (str,", "k, v in info.items(): d.attrs[k] = v f.close() @classmethod def", "can't convert to \" + \"an AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units),", "typed=False) def square_unit(unit): return unit*unit @lru_cache(maxsize=128, typed=False) def divide_units(unit1, unit2):", "software. #----------------------------------------------------------------------------- import copy import numpy as np from distutils.version", "\"\"\" from __future__ import print_function #----------------------------------------------------------------------------- # Copyright (c) 2013,", "handle if # it's a str. units = Unit(input_units, registry=registry)", "is related by only a constant factor but not in", "or newer from numpy import positive, divmod as divmod_, isnat,", "uconcatenate((A, B)) YTArray([ 1., 2., 3., 2., 3., 4.]) cm", "to convert to an equivalent quantity which is not in", "it. if d.shape == self.shape and d.dtype == self.dtype: d[...]", "None and isinstance(input_units, (str, bytes)): if input_units.startswith('code_'): raise UnitParseError( \"Code", "Convert a Pint \"Quantity\" to a YTArray or YTQuantity. 
Parameters", "is called inside pickle.read() and restores the unit data from", "YTArray >>> a = YTArray([1, 2, 3], 'cm') >>> b", "Modified BSD License. # # The full license is in", "absolute, rint, sign, conj, exp, exp2, log, log2, log10, expm1,", "YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache import lru_cache from", "passthrough_unit, ldexp: bitop_units, frexp: return_without_unit, floor: passthrough_unit, ceil: passthrough_unit, trunc:", "YTArray, then one of the arrays must be # dimensionless", "convert from. unit_registry : yt UnitRegistry, optional A yt unit", "in sequence vertically (row wise) while preserving units This is", "numpy.intersect1d that preserves units. All input arrays must have the", "it's not a Unit object. # don't handle all the", "fname : str The file to write the YTArrays to.", "field): field = data._determine_fields(field)[0] if isinstance(field, tuple): finfo = data.ds._get_field_info(field[0],field[1])", "needs to be defined for all numpy versions, see #", "if possible oth = validate_comparison_units(self, other, 'less_than') return super(YTArray, self).__lt__(oth)", "units of cgs are used. Examples -------- >>> E =", "don't call YTArray directly. return type(args[0])(ret, units) return wrapped @lru_cache(maxsize=128,", "to read from. If the dataset has a units attribute,", "the other object is a YTArray before we use the", "logical_and, \\ logical_or, logical_xor, logical_not, maximum, minimum, fmax, fmin, \\", "= Unit() elif isinstance(input_units, Unit): if registry and registry is", "registry to create units from. If input_units is already associated", "UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data, units, registry=registry) # # Start convenience", "See __add__. 
\"\"\" lo = sanitize_units_add(self, left_object, \"addition\") return super(YTArray,", "the units obj.units = units return obj def __repr__(self): \"\"\"", "create units from. If input_units is already associated with a", "to \" + \"an AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod", "dataset to create in the file. info: dictionary A dictionary", "if # it's a str. units = Unit(input_units, registry=registry) #", "always defined for numpy arrays. \"\"\" np_ret = super(YTArray, self).__reduce__()", "v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v def uunion1d(arr1, arr2):", "a view of the array data.\"\"\" return self.ndarray_view() d =", "divide) and method == 'reduce': power_sign = POWER_SIGN_MAPPING[ufunc] if 'axis'", "return obj if input_array is NotImplemented: return input_array.view(cls) if registry", "preserve_units, modf: passthrough_unit, ldexp: bitop_units, frexp: return_without_unit, floor: passthrough_unit, ceil:", "temp, velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\" f = open(fname,", "the array. Powers must be specified using python syntax (cm**3,", "dot = np.dot(op1.d, op2.d) units = op1.units*op2.units if dot.shape ==", "info = {} info['units'] = str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if", "if context is None: if ret.shape == (): return ret[()]", "= sanitize_units_mul(self, left_object) return super(YTArray, self).__rdiv__(lo) def __idiv__(self, other): \"\"\"", "with a unit. Parameters ---------- input_scalar : an integer or", "numpy ndarray\"\"\" return np.array(self) v = value @property def ndview(self):", "Examples -------- >>> a = YTQuantity(4.0, \"cm**2/s\") >>> b =", "**kwargs): func = getattr(ufunc, method) if 'out' in kwargs: out_orig", "character at beginning of line: \\\"%s\\\".\" % line[0]) f.close() if", "have the same number of values. 
Parameters ---------- fname :", "assume_unique=assume_unique) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v def uunion1d(arr1,", "inp.in_units('radian').v if out_arr is not None: out_arr = ufunc(inp).view(np.ndarray) return", "None, results in all columns being read. comments : str,", "preserves units. \"\"\" v = np.vstack(arrs) v = validate_numpy_wrapper_units(v, arrs)", "\"\"\" lo = sanitize_units_add(self, left_object, \"subtraction\") return super(YTArray, self).__rsub__(lo) def", "\"yr\" as \"year\" if str(unit).endswith(\"yr\") and len(str(unit)) in [2,3]: unit", "return True def return_arr(func): @wraps(func) def wrapped(*args, **kwargs): ret, units", "inp.units.is_dimensionless and np.any(ret): raise YTUnitOperationError(op_string, inp.units, dimensionless) return ret def", "negative, power, remainder, mod, absolute, rint, \\ sign, conj, exp,", "this YTArray or YTQuantity has an equivalent unit in *equiv*.", "returns a YTArray # with a unit attribute set to", "def __abs__(self): \"\"\" Return a YTArray with the abs of", "YTUnitOperationError(ufunc, unit1, unit2) unit2 = 1.0 return (inp1, inp2), (unit1,", "= validate_comparison_units( self, other, 'greater than or equal') return super(YTArray,", "units. \"\"\" ro = sanitize_units_add(self, right_object, \"subtraction\") return super(YTArray, self).__sub__(ro)", "file. fmt : str or sequence of strs, optional A", "Attach the units obj.units = units return obj def __repr__(self):", "fabs, spacing, positive, isnat, ) binary_operators = ( add, subtract,", "is other.units.expr: if this.units.base_value == other.units.base_value: return other if not", "when it would be annoying to deal with them. >>>", "power : float or dimensionless YTArray. The pow value. 
\"\"\"", ">>> a = YTArray([1, 2, 3], 'cm') >>> b =", "'units', NULL_UNIT) for _ in input_object]): raise YTIterableUnitCoercionError(input_object) # This", "\"year\" if str(unit).endswith(\"yr\") and len(str(unit)) in [2,3]: unit = str(unit).replace(\"yr\",\"year\")", "object with data converted to mks units. \"\"\" return self.in_units(self.units.get_mks_equivalent())", "-------- >>> A = yt.YTArray([1, 2, 3], 'cm') >>> B", "1., 2., 3., 4.]) cm \"\"\" v = np.union1d(arr1, arr2)", "text file must have the same number of values. Parameters", "else: # If the other object is not a YTArray,", "equivalence you wish to use. To see which equivalencies are", "library pickle module: http://docs.python.org/2/library/pickle.html Unit metadata is encoded in the", "YTQuantity(np.asarray(out_arr), unit) else: if ret_class is YTQuantity: # This happens", "unit object to the array data. Parameters ---------- input_array :", "> 1 return YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr, copy=False), unit) else:", ">>> a + b YTArray([ 401., 502., 603.]) cm >>>", "out_arr, unit = handle_multiply_divide_units( unit, units, out_arr, out_arr) else: raise", "that function for details. \"\"\" return self.in_units(units, equivalence=equivalence, **kwargs) def", "!= getattr(_, 'units', NULL_UNIT) for _ in input_object]): raise YTIterableUnitCoercionError(input_object)", "info to write to append as attributes to the dataset.", "open(fname, 'r') next_one = False units = [] num_cols =", "\"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rtruediv__(lo) def __itruediv__(self,", "is not in the same dimensions. .. note:: All additional", "unit_registry=None): \"\"\" Convert a Pint \"Quantity\" to a YTArray or", "not in the same dimensions. .. note:: All additional keyword", "super(YTArray, self).__rtruediv__(lo) def __itruediv__(self, other): \"\"\" See __div__. 
\"\"\" oth", "from functools import wraps from numpy import \\ add, subtract,", "preserves units. \"\"\" dot = np.dot(op1.d, op2.d) units = op1.units*op2.units", "units[1]: u1d = units[0].is_dimensionless u2d = units[1].is_dimensionless any_nonzero = [np.any(inps[0]),", "---------- power : float or dimensionless YTArray. The pow value.", "super(YTArray, self).__gt__(oth) # # End comparison operators # # #", "= copy.deepcopy(x) arr.convert_to_units(units) return arr if isinstance(x, np.ndarray): return data.ds.arr(x,", "import cPickle as pickle if info is None: info =", "hypot, bitwise_and, bitwise_or, bitwise_xor, left_shift, right_shift, greater, greater_equal, less, less_equal,", "\\\"%s\\\".\" % line[0]) f.close() if len(units) != num_cols: mylog.warning(\"Malformed or", "import Rational from yt.units.unit_lookup_table import \\ default_unit_symbol_lut from yt.units.equivalencies import", "registry and registry is not input_units.registry: units = Unit(str(input_units), registry=registry)", "quantity to convert from. 
unit_registry : Pint UnitRegistry, optional The", "\"\"\"Get a copy of the array data as a numpy", "isnat: return_without_unit, heaviside: preserve_units, } __array_priority__ = 2.0 def __new__(cls,", "isinstance(ret, YTArray): if not inp.units.same_dimensions_as(ret.units): # handle special case of", "log1p, sqrt, square, reciprocal, sin, cos, tan, arcsin, arccos, arctan,", "f.close() if len(units) != num_cols: mylog.warning(\"Malformed or incomplete units header.", "= np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis) units = arr1.units", "preserving units The axis parameter specifies the index of the", "sqrt, square, reciprocal, sin, cos, tan, arcsin, arccos, arctan, sinh,", "while preserving units This is a wrapper around np.hstack that", "12 g/cm**3 and strip them when it would be annoying", "a YTArray, then one of the arrays must be #", "\"\"\" return np.array(self) @classmethod def from_astropy(cls, arr, unit_registry=None): \"\"\" Convert", "on the right of the `/` operator. \"\"\" ro =", "self.units.is_dimensionless and power == -1: ret = super(YTArray, self).__pow__(power) return", "f[group_name] else: g = f.create_group(group_name) else: g = f if", "*equiv*. \"\"\" return self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\" Returns a view", "cm \"\"\" v = np.concatenate(arrs, axis=axis) v = validate_numpy_wrapper_units(v, arrs)", "used to separate values. By default, this is any whitespace.", "optional String that will be written at the beginning of", "YTUnitConversionError( my_units, my_units.dimensions, other_units, other_units.dimensions) return other_units unary_operators = (", ": str Filename to read. dtype : data-type, optional Data-type", "arr : AstroPy Quantity The Quantity to convert from. unit_registry", "else: return self.to_equivalent(units, equivalence, **kwargs) def to(self, units, equivalence=None, **kwargs):", "are YTArrays.\") a1 = arrs[0] if not all(a.units == a1.units", "logic adds significant overhead. 
If set, input_units *must* be a", "Unit class handle if # it's a str. units =", "are supported for this unitful quantity, try the :meth:`list_equivalencies` method.", "equivalence is None: new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) =", "def from_pint(cls, arr, unit_registry=None): \"\"\" Convert a Pint \"Quantity\" to", "except ValueError: mylog.warning(\"Unrecognized character at beginning of line: \\\"%s\\\".\" %", "if ret.shape == (): return YTQuantity(ret, self.units, bypass_validation=True) else: if", "divide_units, negative: passthrough_unit, power: power_unit, remainder: preserve_units, mod: preserve_units, fmod:", "# Input array is an already formed ndarray instance #", "\"Perhaps you meant to do something like this instead: \\n\"", "An ndarray subclass that attaches a symbolic unit object to", "unit_operator = self._ufunc_registry[context[0]] inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if", "# # Begin reduction operators # @return_arr def prod(self, axis=None,", "a new AstroPy quantity with the same unit information. 
\"\"\"", "Parameters ---------- arr : Pint Quantity The Quantity to convert", "to_value(self, units=None, equivalence=None, **kwargs): \"\"\" Creates a copy of this", "inp.units, ret.units) ret = ret.in_units(inp.units) else: # If the other", "Parameters ---------- unit : string The unit that you wish", "new_arr[1]).in_units(unit) except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units, unit) else: return new_arr.in_units(unit)", "self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit, equiv, **kwargs): \"\"\" Convert a YTArray", "return super(YTArray, self).__repr__()+' '+self.units.__repr__() def __str__(self): \"\"\" \"\"\" return str(self.view(np.ndarray))", "= get_binary_op_return_class(type(inp1), type(inp2)) if unit1 is None: unit1 = Unit(registry=getattr(unit2,", "isinf, isnan, signbit, floor, ceil, trunc, modf, frexp, fabs, spacing,", "log10: return_without_unit, expm1: return_without_unit, log1p: return_without_unit, sqrt: sqrt_unit, square: square_unit,", "Unit(registry=getattr(unit2, 'registry', None)) if unit2 is None and ufunc is", "isinstance(new_arr, tuple): try: return type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv,", "used. 
Examples -------- >>> E = YTQuantity(2.5, \"erg/s\") >>> E_new", "something like this instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\" % (input_array, input_units)", "!= (): ret = ret.view(YTArray) if context is None: if", "inputs[0] u = getattr(inp, 'units', None) if u is None:", "else: del f[dataset_name] d = g.create_dataset(dataset_name, data=self) else: d =", "import YTQuantity >>> a = YTQuantity(1, 'cm') >>> b =", "hasattr(array, \"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\") if header != '': header", "def __ne__(self, other): \"\"\" Test if this is not equal", "in f: g = f[group_name] else: g = f.create_group(group_name) else:", "@lru_cache(maxsize=128, typed=False) def power_unit(unit, power): return unit**power @lru_cache(maxsize=128, typed=False) def", "``yt.loadtxt``. Examples -------- >>> sp = ds.sphere(\"c\", (100,\"kpc\")) >>> a", "than ytarray. Returns ------- View of this array's data. \"\"\"", "used instead of the registry associated with the unit object.", "Start operation methods # if LooseVersion(np.__version__) < LooseVersion('1.13.0'): def __add__(self,", "therefore a bare NumPy array. Optionally, an equivalence can be", "str, optional String that will be written at the beginning", "get the bare quantity in. If not specified, the value", "validate_comparison_units(self, other, 'greater than') return super(YTArray, self).__gt__(oth) # # End", "k in d.attrs.keys(): del d.attrs[k] else: del f[dataset_name] d =", "wrapper around np.hstack that preserves units. 
\"\"\" v = np.hstack(arrs)", "if registry is None and isinstance(input_units, (str, bytes)): if input_units.startswith('code_'):", "self).__or__(right_object) def __ror__(self, left_object): return super(YTArray, self).__ror__(left_object) def __ior__(self, other):", "self).__eq__(oth) def __ne__(self, other): \"\"\" Test if this is not", "axis is not None: units = self.units**self.shape[axis] else: units =", "1.543e+25 cm This is equivalent to: >>> b = YTQuantity(5,", "pow value. \"\"\" if isinstance(power, YTArray): if not power.units.is_dimensionless: raise", "and ret.shape != (): ret = ret.view(YTArray) if context is", "a in arrs[1:]): raise RuntimeError(\"Your arrays must have identical units.\")", "ord=None, axis=None, keepdims=False): \"\"\"Matrix or vector norm that preserves units", "(units[0], units[0]) else: if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) inps", "= YTArray(np.asarray(out_arr), unit) else: out_arr = ret_class(np.asarray(out_arr), unit) if out", "u1d = units[0].is_dimensionless u2d = units[1].is_dimensionless any_nonzero = [np.any(inps[0]), np.any(inps[1])]", "mean(self, axis=None, dtype=None, out=None): return super(YTArray, self).mean(axis, dtype, out), self.units", "coerce_iterable_units(other_object) # If the other object is a YTArray and", "YTArray): arr = copy.deepcopy(x) arr.convert_to_units(units) return arr if isinstance(x, np.ndarray):", "reciprocal: reciprocal_unit, sin: return_without_unit, cos: return_without_unit, tan: return_without_unit, sinh: return_without_unit,", "= np.asarray(input_array, dtype=dtype).view(cls) # Check units type if input_units is", "convert to. \"\"\" new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) =", "the current units. 
equivalence : string, optional The equivalence you", "u is None: u = NULL_UNIT if u.dimensions is angle", "group_name is not None: g = f[group_name] else: g =", "else: unit = u**(power_sign*inp.size) else: unit = self._ufunc_registry[ufunc](u) ret_class =", "line[0]) f.close() if len(units) != num_cols: mylog.warning(\"Malformed or incomplete units", "def __sub__(self, right_object): \"\"\" Subtract the object on the right", "pint import UnitRegistry if unit_registry is None: unit_registry = UnitRegistry()", "arrays must have identical units.\") v.units = a1.units return v", "string The unit that you wish to convert to. equiv", "filled with zeros if not inp.units.is_dimensionless and np.any(ret): raise YTUnitOperationError(op_string,", "units = Unit(str(input_array.units), registry=registry) ret.units = units elif isinstance(input_units, Unit):", "rad2deg: return_without_unit, bitwise_and: bitop_units, bitwise_or: bitop_units, bitwise_xor: bitop_units, invert: invert_units,", "uq = unit_quantity @property def unit_array(self): \"\"\"Get a YTArray filled", "sanitize_units_add(self, left_object, \"subtraction\") return super(YTArray, self).__rsub__(lo) def __isub__(self, other): \"\"\"", "return_without_unit, logaddexp2: return_without_unit, true_divide: divide_units, floor_divide: divide_units, negative: passthrough_unit, power:", "if line[0] == comments: if next_one: units = words[1:] if", "distributed with this software. 
#----------------------------------------------------------------------------- import copy import numpy as", "right_object, \"addition\") return super(YTArray, self).__add__(ro) def __radd__(self, left_object): \"\"\" See", "= arr.unit ap_units = [] for base, exponent in zip(u.bases,", "[np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False): units = (units[1], units[1])", "argument is ignored if the version of numpy installed is", "typed=False) def reciprocal_unit(unit): return unit**-1 def passthrough_unit(unit, unit2=None): return unit", ":meth:`list_equivalencies` method. Default: None Returns ------- NumPy array \"\"\" if", "self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\" Creates a copy of this array", "r'\\rm{' + k.replace('_', '\\ ') + '}') registry = UnitRegistry(lut=lut,", "when we try to load an old pickle file #", "__rdiv__(self, left_object): \"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self, left_object)", "binary_operators = ( add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide,", "not inp.units.is_dimensionless and np.any(ret): raise YTUnitOperationError(op_string, inp.units, dimensionless) return ret", "but not in the same units. Parameters ---------- unit :", "return YTQuantity(1.0, self.units) uq = unit_quantity @property def unit_array(self): \"\"\"Get", "if ``axis=0`` it will be the first dimension and if", "len(inputs) == 2: unit_operator = self._ufunc_registry[ufunc] inps, units, ret_class =", "as \"h\" if unit_str == \"h\": unit_str = \"hr\" ap_units.append(\"%s**(%s)\"", "= [] for base, exponent in arr._units.items(): bs = convert_pint_units(base)", "result. For example, if ``axis=0`` it will be the first", "if this is equal to the object on the right.", "ret.in_units(inp.units) return ret def sanitize_units_add(this_object, other_object, op_string): inp = coerce_iterable_units(this_object)", "units you want to convert to. 
\"\"\" new_units = _unit_repr_check_same(self.units,", "here, let the Unit class handle if # it's a", "elif any_nonzero[1] == np.bool_(False): units = (units[0], units[0]) else: if", "False oth = validate_comparison_units(self, other, 'equal') return super(YTArray, self).__eq__(oth) def", "= Unit(str(input_units), registry=registry) else: units = input_units else: # units", "with this quantity. Returns Unit object. \"\"\" # let Unit()", "``axis=0`` it will be the first dimension and if ``axis=-1``", "sphere stuff', delimiter=\"\\t\") \"\"\" if not isinstance(arrays, list): arrays =", "if ret.shape == (): return YTQuantity(ret, units) else: # This", "@classmethod def from_astropy(cls, arr, unit_registry=None): \"\"\" Convert an AstroPy \"Quantity\"", "out_arr) else: raise RuntimeError( \"Support for the %s ufunc has", "copysign, nextafter, ldexp, fmod, divmod_, heaviside ) trigonometric_operators = (", "---------- units : Unit object or string, optional The units", "A scalar associated with a unit. Parameters ---------- input_scalar :", "units = self.units**self.size return super(YTArray, self).prod(axis, dtype, out), units @return_arr", "'units', NULL_UNIT) def __pos__(self): \"\"\" Posify the data. 
\"\"\" #", "# redefine this here to avoid a circular import from", "NULL_UNIT = Unit() POWER_SIGN_MAPPING = {multiply: 1, divide: -1} #", "np from distutils.version import LooseVersion from functools import wraps from", "oneway_or_equivalent: new_arr = this_equiv.convert( self, conv_unit.dimensions, **kwargs) if isinstance(new_arr, tuple):", "np.concatenate(arrs, axis=axis) v = validate_numpy_wrapper_units(v, arrs) return v def ucross(arr1,", "multiply_units, divide: divide_units, logaddexp: return_without_unit, logaddexp2: return_without_unit, true_divide: divide_units, floor_divide:", "inputs) out_arr = func(np.asarray(inp), out=out, **kwargs) if ufunc in (multiply,", "else: # units kwarg set, but it's not a Unit", "return super(YTArray, self).__rmul__(lo) def __imul__(self, other): \"\"\" See __mul__. \"\"\"", "a valid unit object. Defaults to False. Examples -------- >>>", "them. >>> print(np.log10(a)) 1.07918124605 YTQuantity is tightly integrated with yt", "r\"\"\" Write YTArrays with unit information to a text file.", "to the array data. Parameters ---------- input_array : :obj:`!iterable` A", "super(YTArray, self).__ge__(oth) def __gt__(self, other): \"\"\" Test if this is", "ro = sanitize_units_add(self, right_object, \"subtraction\") return super(YTArray, self).__sub__(ro) def __rsub__(self,", "= np_ret[:2] + unit_state + np_ret[3:] return new_ret def __setstate__(self,", "yt.utilities.lru_cache import lru_cache from numbers import Number as numeric_type from", "axis=axis, keepdims=keepdims) if norm.shape == (): return YTQuantity(norm, data.units) return", "= None if len(inputs) == 1: _, inp, u =", "if not isinstance(other_units, Unit): other_units = Unit(other_units, registry=my_units.registry) equiv_dims =", "\"\"\" new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) self.units", "file, before the unit header. 
footer : str, optional String", "heaviside = (None,)*4 from yt.units.unit_object import Unit, UnitParseError from yt.units.unit_registry", "cm This is equivalent to: >>> b = YTArray(np.ones(5), 'code_length',", "sequence vertically (row wise) while preserving units This is a", "default_unit_symbol_lut from yt.units.equivalencies import equivalence_registry from yt.utilities.logger import ytLogger as", "__reduce__(self): \"\"\"Pickle reduction method See the documentation for the standard", "word in col_words: float(word) num_cols = len(col_words) break except ValueError:", "* conversion_factor, new_units) if offset: np.subtract(new_array, offset*new_array.uq, new_array) return new_array", "sum(self, axis=None, dtype=None, out=None): return super(YTArray, self).sum(axis, dtype, out), self.units", "preserve_units(unit1, unit2=None): return unit1 @lru_cache(maxsize=128, typed=False) def power_unit(unit, power): return", "avoids creating a YTQuantity with # size > 1 return", "yt.utilities.on_demand_imports import _astropy from sympy import Rational from yt.units.unit_lookup_table import", "specified, the value will be returned in the current units.", "mks units. \"\"\" return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit, equiv, **kwargs):", "oth, out=self) return self def __truediv__(self, right_object): ro = sanitize_units_mul(self,", "The dtype of the array data. 
Examples -------- >>> from", "self def __pow__(self, power): \"\"\" Raise this YTArray to some", "isinstance(ret, YTQuantity) and ret.shape != (): ret = ret.view(YTArray) if", "arcsin: return_without_unit, arccos: return_without_unit, arctan: return_without_unit, arctan2: arctan2_unit, arcsinh: return_without_unit,", "str(self.view(np.ndarray)) + ' ' + str(self.units) # # Start unit", "super(YTArray, self).__rand__(left_object) def __iand__(self, other): np.bitwise_and(self, other, out=self) return self", "raise YTUnitOperationError(op_string, this.units, other.units) return other.in_units(this.units) return other @lru_cache(maxsize=128, typed=False)", "'#'. Examples -------- >>> temp, velx = yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\")", "validation is skipped. Using this option may produce corrupted, invalid", "unit, pow in powers_dict.items(): # we have to do this", "array data. Examples -------- >>> from yt import YTQuantity >>>", "a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm This is", "self).dot(b), self.units*b.units def __reduce__(self): \"\"\"Pickle reduction method See the documentation", "Work around a sympy issue (I think?) # # If", "super(YTArray, self).std(axis, dtype, out, ddof), self.units def __array_wrap__(self, out_arr, context=None):", "other, out=self) return self def __xor__(self, right_object): return super(YTArray, self).__xor__(right_object)", "np >>> ureg = UnitRegistry() >>> a = np.random.random(10) >>>", "array([ -inf, 0. 
, 0.30103 , 0.47712125, 0.60205999, 0.69897 ,", "b = ureg.Quantity(a, \"erg/cm**3\") >>> c = yt.YTArray.from_pint(b) \"\"\" p_units", "YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret def sanitize_units_add(this_object, other_object, op_string):", "pickle.read() and restores the unit data from the metadata extracted", "# size > 1 return YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr, copy=False),", "v def array_like_field(data, x, field): field = data._determine_fields(field)[0] if isinstance(field,", "unit. Parameters ---------- input_scalar : an integer or floating point", "other): np.bitwise_or(self, other, out=self) return self def __xor__(self, right_object): return", "be the last dimension. This is a wrapper around np.stack", "def __reduce__(self): \"\"\"Pickle reduction method See the documentation for the", "# array filled with zero if not np.any(other_object): return ret.view(np.ndarray)", "not a Unit object. # don't handle all the cases", "502., 603.]) cm >>> b + a YTArray([ 4.01, 5.02,", "used to indicate the start of a comment; default: '#'.", "as the object # under consideration, convert so we don't", "# Overwrite without deleting if we can get away with", "not supplied, the default one will be used. \"\"\" #", "write to append as attributes to the dataset. 
group_name: string", "negative: passthrough_unit, power: power_unit, remainder: preserve_units, mod: preserve_units, fmod: preserve_units,", "== 1: _, inp, u = get_inp_u_unary(ufunc, inputs) out_arr =", "YTQuantity(4.0, \"cm**2/s\") >>> b = a.to_pint() \"\"\" from pint import", "np.asarray(out_orig[0]) else: out = None if len(inputs) == 1: _,", "subtracting with zero or # array filled with zero if", "this_equiv._one_way) if self.has_equivalent(equiv) and oneway_or_equivalent: new_arr = this_equiv.convert( self, conv_unit.dimensions,", "not specified, the value will be returned in the current", "or YTQuantity The unitful quantity to convert from. unit_registry :", "RuntimeError( \"Support for the %s ufunc has not been added", ">>> B = yt.YTArray([2, 3, 4], 'cm') >>> uintersect1d(A, B)", "an equivalent unit in *equiv*. \"\"\" return self.units.has_equivalent(equiv) def ndarray_view(self):", "else: if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) inps = (inps[0],", "dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut = None f.close() registry", "in and convert a dataset in an hdf5 file into", "passthrough_unit(unit, unit2=None): return unit def return_without_unit(unit, unit2=None): return None def", "as well. group_name: string An optional group to read the", "v = validate_numpy_wrapper_units(v, arrs) return v def uhstack(arrs): \"\"\"Stack arrays", "return v if not all(isinstance(a, YTArray) for a in arrs):", "true_divide, \\ floor_divide, negative, power, remainder, mod, absolute, rint, \\", "compatible with this quantity. Returns Unit object. \"\"\" # let", ": string, optional The equivalence you wish to use. 
To", "YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\"", "right_shift, greater, greater_equal, less, less_equal, not_equal, equal, logical_and, logical_or, logical_xor,", "registry is not None: obj.units.registry = registry return obj if", "return unit**power @lru_cache(maxsize=128, typed=False) def square_unit(unit): return unit*unit @lru_cache(maxsize=128, typed=False)", "def passthrough_unit(unit, unit2=None): return unit def return_without_unit(unit, unit2=None): return None", "Returns a view into the array, but as an ndarray", "This is a wrapper around np.dot that preserves units. \"\"\"", "if axis is not None: units = self.units**self.shape[axis] else: units", "unit_str = \"hr\" ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent))) ap_units = \"*\".join(ap_units)", "= line.strip().split(delimiter) for word in col_words: float(word) num_cols = len(col_words)", "= yt.YTArray.from_pint(b) \"\"\" p_units = [] for base, exponent in", "% line[0]) f.close() if len(units) != num_cols: mylog.warning(\"Malformed or incomplete", "this.units.base_value == other.units.base_value: return other if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string,", "None: dataset_name = 'array_data' f = h5py.File(filename) if group_name is", "input_units.startswith('code_'): raise UnitParseError( \"Code units used without referring to a", "ret.units) ret = ret.in_units(inp.units) else: # If the other object", "convert_to_cgs(self): \"\"\" Convert the array and units to the equivalent", "valid unit object. Defaults to False. Examples -------- >>> from", "v f.close() @classmethod def from_hdf5(cls, filename, dataset_name=None, group_name=None): r\"\"\"Attempts read", "Returns ------- View of this array's data. \"\"\" return self.view(np.ndarray)", "deepcopy of arrays and quantities. 
\"\"\" if memodict is None:", "be # dimensionless or filled with zeros if not inp.units.is_dimensionless", "None) if equiv_dims == other_units.dimensions: if current_mks in equiv_dims.free_symbols: base", "Data-type of the resulting array; default: float. delimiter : str,", "if group_name in f: g = f[group_name] else: g =", "Unit): other_units = Unit(other_units, registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions, None) if", "2nd, 5th and 6th columns. The default, None, results in", "and len(str(unit)) in [2,3]: unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit,", "if obj is None and hasattr(self, 'units'): return self.units =", "arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None): \"\"\"Applies the cross product", "and if ``axis=-1`` it will be the last dimension. This", "be used. Examples -------- >>> from pint import UnitRegistry >>>", "input_array[0].units, registry=registry) # Input array is an already formed ndarray", "unit = handle_multiply_divide_units( unit, units, out, out_arr) else: raise RuntimeError(", "for word in col_words: float(word) num_cols = len(col_words) break except", "= YTQuantity(5, 'code_length', registry=ds.unit_registry) >>> np.all(a == b) True \"\"\"", "Unit(unit, registry=registry) def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation This is necessary", "input_units if registry is not None: obj.units.registry = registry return", "the returned tuple, itself a tuple used to restore the", "YTArray.\" % str(context[0])) if unit is None: out_arr = np.array(out_arr,", "sqrt, square, \\ reciprocal, sin, cos, tan, arcsin, arccos, arctan,", "except TypeError: # this case happens when we try to", "kwargs: out_orig = kwargs.pop('out') out = np.asarray(out_orig[0]) else: out =", "sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere stuff', delimiter=\"\\t\") \"\"\" if", "if not inp.units.same_dimensions_as(ret.units): # handle special case 
of adding or", "the :meth:`list_equivalencies` method. Default: None Returns ------- YTArray \"\"\" if", "YTInvalidUnitEquivalence(equiv, self.units, unit) def list_equivalencies(self): \"\"\" Lists the possible equivalencies", "YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units, unit) else: return new_arr.in_units(unit) else: raise", "3., 2., 3., 4.]) cm \"\"\" v = np.concatenate(arrs, axis=axis)", "equivalence, **kwargs) def to(self, units, equivalence=None, **kwargs): \"\"\" An alias", "A = yt.YTArray([1, 2, 3], 'cm') >>> B = yt.YTArray([2,", "data. \"\"\" return super(YTArray, self).__abs__() # # Start comparison operators.", "operators are not defined for YTArray instances\") def bitop_units(unit1, unit2):", "YTUnitOperationError('power', power.unit) # Work around a sympy issue (I think?)", ":meth:`list_equivalencies` method. Examples -------- >>> a = yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\",", "= [units[col] for col in usecols] mylog.info(\"Array units: %s\" %", "separating columns. header : str, optional String that will be", "= sanitize_units_mul(self, right_object) return super(YTArray, self).__truediv__(ro) def __rtruediv__(self, left_object): \"\"\"", "at the end of the file. comments : str, optional", "the unit object. dtype : data-type The dtype of the", "= self.d values *= conversion_factor if offset: np.subtract(self, offset*self.uq, self)", "\"\"\" Test if this is greater than the object on", "array's data. 
\"\"\" return self.view(np.ndarray) def to_ndarray(self): \"\"\" Creates a", "base, exponent in arr._units.items(): bs = convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs,", "If the other object is not a YTArray, then one", "if unit_operator in (multiply_units, divide_units): out_arr, out_arr, unit = handle_multiply_divide_units(", "true_divide, power, remainder, mod, arctan2, hypot, bitwise_and, bitwise_or, bitwise_xor, left_shift,", "# numpy version equal to or newer than 1.13 def", "f.readlines(): words = line.strip().split() if len(words) == 0: continue if", "super(YTArray, self).mean(axis, dtype, out), self.units @return_arr def sum(self, axis=None, dtype=None,", "validate_numpy_wrapper_units(v, [arr1, arr2]) return v def unorm(data, ord=None, axis=None, keepdims=False):", "unit_operator in (preserve_units, comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps,", "= ( sin, cos, tan, ) class YTArray(np.ndarray): \"\"\" An", "Returns Unit object. \"\"\" # let Unit() handle units arg", "return_without_unit, sqrt: sqrt_unit, square: square_unit, reciprocal: reciprocal_unit, sin: return_without_unit, cos:", "# End comparison operators # # # Begin reduction operators", "a YTArray. if hasattr(other, 'units'): if this.units.expr is other.units.expr: if", "data. Parameters ---------- input_array : :obj:`!iterable` A tuple, list, or", "def multiply_units(unit1, unit2): return unit1 * unit2 def preserve_units(unit1, unit2=None):", "convert_pint_units NULL_UNIT = Unit() POWER_SIGN_MAPPING = {multiply: 1, divide: -1}", "the specified unit system. Parameters ---------- unit_system : string, optional", "tuple)): return cls2 if cls2 in (np.ndarray, np.matrix, np.ma.masked_array) or", "object The registry to create units from. 
If input_units is", "\" \"dimensionless!\") units = [\"dimensionless\"]*num_cols arrays = np.loadtxt(fname, dtype=dtype, comments=comments,", "__and__(self, right_object): return super(YTArray, self).__and__(right_object) def __rand__(self, left_object): return super(YTArray,", "coerce_iterable_units(inputs[1]) unit1 = getattr(inp1, 'units', None) unit2 = getattr(inp2, 'units',", "the first dimension and if ``axis=-1`` it will be the", "self def __floordiv__(self, right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray,", "in_base(self, unit_system=\"cgs\"): \"\"\" Creates a copy of this array with", "out_arr = func(np.asarray(inp), out=out, **kwargs) if ufunc in (multiply, divide)", "a sympy issue (I think?) # # If I don't", "\"\"\" lo = sanitize_units_add(self, left_object, \"addition\") return super(YTArray, self).__radd__(lo) def", "def __str__(self): \"\"\" \"\"\" return str(self.view(np.ndarray)) + ' ' +", "a + b YTArray([ 401., 502., 603.]) cm >>> b", "!= 1.0: if not units[0].is_dimensionless: if units[0].dimensions == units[1].dimensions: out_arr", "log1p, sqrt, square, \\ reciprocal, sin, cos, tan, arcsin, arccos,", "#----------------------------------------------------------------------------- import copy import numpy as np from distutils.version import", "as this array\"\"\" return np.ones_like(self) ua = unit_array def __getitem__(self,", "String that will be prepended to the ``header`` and ``footer``", "def __rtruediv__(self, left_object): \"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self,", "usecols : sequence, optional Which columns to read, with 0", "cls2: return cls1 if cls1 in (np.ndarray, np.matrix, np.ma.masked_array) or", "Default: None Returns ------- NumPy array \"\"\" if units is", "will create a copy of the data in the iterable.", "the Modified BSD License. 
# # The full license is", "RuntimeError(\"YTQuantity instances must be scalars\") return ret def __repr__(self): return", "arrays in sequence horizontally (column wise) while preserving units This", "to get the bare quantity in. If not specified, the", "in the equivalent cgs units, and returns it. Returns -------", ": list of YTArrays or single YTArray The array(s) to", "of the input data, or, if none is found, uses", "YTQuantity) and ret.shape != (): ret = ret.view(YTArray) if context", "of the array data.\"\"\" return self.ndarray_view() d = ndview @property", "from. If not specified, the arrays are datasets at the", "dataset to dataset_name: string The name of the dataset to", "ndarray. This is always defined for numpy arrays. \"\"\" np_ret", "with a unit attribute set to the sympy expression 1/1", "happens if you do ndarray * YTQuantity. Explicitly # casting", "square, \\ reciprocal, sin, cos, tan, arcsin, arccos, arctan, arctan2,", "a YTArray. if other is None: return True oth =", "__future__ import print_function #----------------------------------------------------------------------------- # Copyright (c) 2013, yt Development", "See __sub__. \"\"\" oth = sanitize_units_add(self, other, \"subtraction\") np.subtract(self, oth,", "try: len(obj) except: return False return True def return_arr(func): @wraps(func)", "unit information from a text file. Each row in the", "YTQuantity(2.5, \"erg/s\") >>> E_new = E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def", "is always defined for numpy arrays. \"\"\" np_ret = super(YTArray,", "object # under consideration, convert so we don't mix units", "fmod, divmod_, heaviside ) trigonometric_operators = ( sin, cos, tan,", "datasets: >>> import yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a", "top level by default. 
Examples -------- >>> a = YTArray([1,2,3],", "import UnitRegistry >>> import numpy as np >>> ureg =", "\\ current_mks, \\ dimensionless, \\ em_dimensions from yt.utilities.exceptions import \\", "2: unit_operator = self._ufunc_registry[ufunc] inps, units, ret_class = get_inp_u_binary(ufunc, inputs)", "tan: return_without_unit, sinh: return_without_unit, cosh: return_without_unit, tanh: return_without_unit, arcsin: return_without_unit,", "in the equivalent mks units, and returns it. Returns -------", "the abs of the data. \"\"\" return super(YTArray, self).__abs__() #", "quantities. \"\"\" if memodict is None: memodict = {} ret", "(c) 2013, yt Development Team. # # Distributed under the", "return_without_unit, copysign: passthrough_unit, nextafter: preserve_units, modf: passthrough_unit, ldexp: bitop_units, frexp:", "yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit = Unit(unit, registry=self.units.registry) if", "def arctan2_unit(unit1, unit2): return NULL_UNIT def comparison_unit(unit1, unit2=None): return None", "'code_length') >>> a.in_cgs() 1.543e+25 cm This is equivalent to: >>>", "arr : Pint Quantity The Quantity to convert from. unit_registry", "super(YTArray, self).__pow__ returns a YTArray # with a unit attribute", "raise_error: raise YTUfuncUnitError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return", "ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None): \"\"\"Applies the cross", "the documentation for the standard library pickle module: http://docs.python.org/2/library/pickle.html Unit", "line: \\\"%s\\\".\" % line[0]) f.close() if len(units) != num_cols: mylog.warning(\"Malformed", "is equivalent to: >>> b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>>", "YTArray): if not power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit) # Work around", "Return a YTArray with the abs of the data. 
\"\"\"", "than or equal to the object on the right. \"\"\"", "isinstance(unit2, YTArray): if unit2.units.is_dimensionless: pass else: raise YTUnitOperationError(ufunc, unit1, unit2)", "that preserves units This is a wrapper around np.dot that", "to_equivalent(self, unit, equiv, **kwargs): \"\"\" Convert a YTArray or YTQuantity", "= sanitize_units_mul(self, other) np.true_divide(self, oth, out=self) return self def __floordiv__(self,", "ndarray\"\"\" return np.array(self) v = value @property def ndview(self): \"\"\"Get", "units, and returns it. Optionally, an equivalence can be specified", "if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]() oneway_or_equivalent = (", "arrays in sequence vertically (row wise) while preserving units This", "units you want to get a new quantity in. equivalence", "will be the first dimension and if ``axis=-1`` it will", "them when it would be annoying to deal with them.", "or issubclass(cls2, (numeric_type, np.number, list, tuple)): return cls1 if issubclass(cls1,", "YTArray. if other is None: return True oth = validate_comparison_units(self,", "minimum: preserve_units, fmax: preserve_units, fmin: preserve_units, isreal: return_without_unit, iscomplex: return_without_unit,", "or floating point scalar The scalar to attach units to", "in arrs): return v if not all(isinstance(a, YTArray) for a", "attaches a symbolic unit object to the array data. Parameters", "units. Parameters ---------- unit : string The unit that you", "(multiply_units, divide_units): out_arr, out_arr, unit = handle_multiply_divide_units( unit, units, out_arr,", "UnitParseError( \"Code units used without referring to a dataset. \\n\"", "be specified to convert to an equivalent quantity which is", "is None: raise ImportError(\"You don't have AstroPy installed, so you", "not specified, the default base units of cgs are used.", ": data-type The dtype of the array data. 
Examples --------", "v = validate_numpy_wrapper_units(v, arrs) return v def array_like_field(data, x, field):", "symbolic unit object to the array data. Parameters ---------- input_array", "Must check for the correct (same dimension) units. \"\"\" ro", "= u**(power_sign*inp.size) else: unit = self._ufunc_registry[ufunc](u) ret_class = type(self) elif", "divide_units): out_arr, out_arr, unit = handle_multiply_divide_units( unit, units, out_arr, out_arr)", "if not any(isinstance(a, YTArray) for a in arrs): return v", "other @lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units, other_units): \"\"\" Takes a Unit", "If not specified, the default base units of cgs are", "around numpy.intersect1d that preserves units. All input arrays must have", "arrays to. If not specified, the arrays are datasets at", "out_arr, inp, u = get_inp_u_unary(ufunc, inputs, out_arr) unit = self._ufunc_registry[context[0]](u)", "return YTArray(dot, units) def uvstack(arrs): \"\"\"Stack arrays in sequence vertically", "units, equivalence=None, **kwargs): \"\"\" Creates a copy of this array", "input validation logic adds significant overhead. If set, input_units *must*", "\"\"\" conv_unit = Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv", "unit : string The unit that you wish to convert", "np.matrix, np.ma.masked_array) or issubclass(cls1, (numeric_type, np.number, list, tuple)): return cls2", "YTQuantity(ret, self.units, bypass_validation=True) else: if hasattr(self, 'units'): ret.units = self.units", "bypass_validation is True: obj = np.asarray(input_array, dtype=dtype).view(cls) obj.units = input_units", "of numpy.concatenate for full details. Examples -------- >>> A =", "get_inp_u_binary(ufunc, inputs) if unit_operator in (comparison_unit, arctan2_unit): inps, units =", "np.float64 bypass_validation : boolean If True, all input validation is", "equiv : string The equivalence you wish to use. 
To", "= np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter, converters=None, unpack=True, usecols=usecols, ndmin=0) if", "[units[col] for col in usecols] mylog.info(\"Array units: %s\" % \",", "return ret[()] else: return ret ufunc = context[0] inputs =", "= yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.quan(5, 'code_length') >>> a.in_cgs() 1.543e+25", "= len(col_words) break except ValueError: mylog.warning(\"Unrecognized character at beginning of", "with data converted to cgs units. \"\"\" return self.in_units(self.units.get_cgs_equivalent()) def", "k.replace('_', '\\ ') + '}') registry = UnitRegistry(lut=lut, add_default_symbols=False) self.units", "if input_units.startswith('code_'): raise UnitParseError( \"Code units used without referring to", "\"Support for the %s ufunc with %i inputs has not", "from a text file. Each row in the text file", "you wish to use. To see which equivalencies are supported", "return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit, equiv, **kwargs): \"\"\" Convert a", "out_arr = ret_class(np.asarray(out_arr), unit) if out is not None: out_orig[0].flat[:]", "from yt.utilities.on_demand_imports import _astropy from sympy import Rational from yt.units.unit_lookup_table", "YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache import lru_cache from numbers", "type(self) elif ufunc in binary_operators: unit_operator = self._ufunc_registry[context[0]] inps, units,", "arr2]) return v def unorm(data, ord=None, axis=None, keepdims=False): \"\"\"Matrix or", "_unit_repr_check_same(my_units, other_units): \"\"\" Takes a Unit object, or string of", "= self.in_units(units, equivalence=equivalence, **kwargs).value if isinstance(self, YTQuantity): return float(v) else:", "the arrays to. If not specified, the arrays are datasets", "\"\"\" if _astropy.units is None: raise ImportError(\"You don't have AstroPy", "a Unit obj. 
if not isinstance(other_units, Unit): other_units = Unit(other_units,", "group_name is not None: if group_name in f: g =", "with the data in the specified unit system, and returns", "Quantity to convert from. unit_registry : yt UnitRegistry, optional A", "other.units.base_value: return other if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units)", "def from_hdf5(cls, filename, dataset_name=None, group_name=None): r\"\"\"Attempts read in and convert", "Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]() oneway_or_equivalent", "# Start operation methods # if LooseVersion(np.__version__) < LooseVersion('1.13.0'): def", "is a wrapper around np.stack that preserves units. \"\"\" v", "numpy.concatenate for full details. Examples -------- >>> A = yt.YTArray([1,", "unit) for arr, unit in zip(arrays, units)]) def savetxt(fname, arrays,", "with unit information from a text file. Each row in", "fmt : str or sequence of strs, optional A single", "unit as this array and a value of 1.0\"\"\" return", "sorted unique elements of the two input arrays. A wrapper", "sanitize_units_add(this_object, other_object, op_string): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) #", "[arr1, arr2]) return v def uunion1d(arr1, arr2): \"\"\"Find the union", "* unit2 def preserve_units(unit1, unit2=None): return unit1 @lru_cache(maxsize=128, typed=False) def", "\"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude, p_units, registry=unit_registry) else: return", "other): \"\"\" Test if this is less than the object", "other) np.multiply(self, oth, out=self) return self def __div__(self, right_object): \"\"\"", "\"\"\" See __add__. 
\"\"\" oth = sanitize_units_add(self, other, \"addition\") np.add(self,", "LooseVersion from functools import wraps from numpy import \\ add,", "dtype = getattr(input_array, 'dtype', np.float64) if bypass_validation is True: obj", "load an old pickle file # created before we serialized", "YTArray([ 1., 2., 3., 4.]) cm \"\"\" v = np.union1d(arr1,", "[arr1, arr2]) return v def unorm(data, ord=None, axis=None, keepdims=False): \"\"\"Matrix", "subclass. \" \"Received operand types (%s) and (%s)\" % (cls1,", "'greater than or equal') return super(YTArray, self).__ge__(oth) def __gt__(self, other):", "don't handle all the cases here, let the Unit class", "return other_units unary_operators = ( negative, absolute, rint, sign, conj,", "of numbers try: col_words = line.strip().split(delimiter) for word in col_words:", "continue if line[0] == comments: if next_one: units = words[1:]", "isinstance(input_object, np.ndarray): return input_object if iterable(input_object): if any([isinstance(o, YTArray) for", "this_equiv = equivalence_registry[equiv]() oneway_or_equivalent = ( conv_unit.has_equivalent(equiv) or this_equiv._one_way) if", "greater_equal: comparison_unit, less: comparison_unit, less_equal: comparison_unit, not_equal: comparison_unit, equal: comparison_unit,", "to of the hdf5 file. 
dataset_name: string The name of", "defined for all numpy versions, see # numpy issue #9081", "unit_system=\"cgs\"): \"\"\" Convert the array and units to the equivalent", "import cPickle as pickle if dataset_name is None: dataset_name =", "0.47712125, 0.60205999, 0.69897 , 0.77815125, 0.84509804]) YTArray is tightly integrated", "the dtype of the input data, or, if none is", "in zip(u.bases, u.powers): unit_str = base.to_string() # we have to", "op2.d) units = op1.units*op2.units if dot.shape == (): return YTQuantity(dot,", "import numpy as np >>> ureg = UnitRegistry() >>> a", "dimensions as the object # under consideration, convert so we", "None: raise ImportError(\"You don't have AstroPy installed, so you can't", "rint, sign, conj, exp, exp2, log, log2, log10, expm1, log1p,", "sanitize_units_add(self, right_object, \"addition\") return super(YTArray, self).__add__(ro) def __radd__(self, left_object): \"\"\"", "= new_units values = self.d values *= conversion_factor if offset:", "\"added to YTArray.\" % (str(ufunc), len(inputs))) if unit is None:", "\"\"\"Find the union of two arrays. A wrapper around numpy.intersect1d", "if isinstance(power, YTArray): if not power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit) #", "of a comment; default: '#'. Examples -------- >>> temp, velx", "unit1, unit2) unit2 = 1.0 return (inp1, inp2), (unit1, unit2),", ">>> np.log10(a) array([ -inf, 0. 
, 0.30103 , 0.47712125, 0.60205999,", "is not None: if group_name in f: g = f[group_name]", "= sanitize_units_mul(self, other) np.multiply(self, oth, out=self) return self def __div__(self,", "right_object): return super(YTArray, self).__xor__(right_object) def __rxor__(self, left_object): return super(YTArray, self).__rxor__(left_object)", "to or newer than 1.13 def __array_ufunc__(self, ufunc, method, *inputs,", "equivalency, which should be used if that particular equivalency requires", "signbit, copysign, nextafter, \\ modf, ldexp, frexp, fmod, floor, ceil,", "other object is a YTArray before we use the `units`", "see if this YTArray or YTQuantity has an equivalent unit", "input_units.registry: units = Unit(str(input_units), registry=registry) else: units = input_units else:", "other_units, base) if not my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units, my_units.dimensions, other_units,", "must be # dimensionless or filled with zeros if not", "- 4, 'g/cm**3') >>> np.abs(a) YTArray([4, 3, 2, 1, 0,", "np_ret[3:] return new_ret def __setstate__(self, state): \"\"\"Pickle setstate method This", "convert_to_units(self, units): \"\"\" Convert the array and units to the", "around np.hstack that preserves units. \"\"\" v = np.hstack(arrs) v", "dtype=None, out=None): return super(YTArray, self).sum(axis, dtype, out), self.units @return_arr def", "prepended to the ``header`` and ``footer`` strings, to mark them", "obj): if obj is None and hasattr(self, 'units'): return self.units", "wrapper around numpy.concatenate preserves units. 
All input arrays must have", "units, out, out_arr): if unit.is_dimensionless and unit.base_value != 1.0: if", "hasattr(self, 'units'): return self.units = getattr(obj, 'units', NULL_UNIT) def __pos__(self):", "YTArray.\" % (str(ufunc), len(inputs))) if unit is None: out_arr =", "numpy versions, see # numpy issue #9081 return type(self)(super(YTArray, self).__pos__(),", "ord=ord, axis=axis) else: norm = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims) if", "@lru_cache(maxsize=128, typed=False) def multiply_units(unit1, unit2): return unit1 * unit2 def", "return self def __or__(self, right_object): return super(YTArray, self).__or__(right_object) def __ror__(self,", "return_without_unit, log: return_without_unit, log2: return_without_unit, log10: return_without_unit, expm1: return_without_unit, log1p:", "self).__pow__(power) def __abs__(self): \"\"\" Return a YTArray with the abs", "trigonometric_operators = ( sin, cos, tan, ) class YTArray(np.ndarray): \"\"\"", "YTArray(v, units, registry=registry) return arr def uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find", ": AstroPy Quantity The Quantity to convert from. unit_registry :", "YTArray avoids creating a YTQuantity with # size > 1", "other, 'greater than or equal') return super(YTArray, self).__ge__(oth) def __gt__(self,", "coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # Make sure the other object", "dimensions. .. note:: All additional keyword arguments are passed to", "without units. Output is therefore a bare NumPy array. 
Optionally,", "axis=axis) units = arr1.units * arr2.units arr = YTArray(v, units,", "tuple): try: return type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units,", "unit*unit @lru_cache(maxsize=128, typed=False) def divide_units(unit1, unit2): return unit1/unit2 @lru_cache(maxsize=128, typed=False)", "if dataset_name in g.keys(): d = g[dataset_name] # Overwrite without", "boolean If True, all input validation is skipped. Using this", "comparison_unit, logical_or: comparison_unit, logical_xor: comparison_unit, logical_not: return_without_unit, maximum: preserve_units, minimum:", "specified using python syntax (cm**3, not cm^3). registry : ~yt.units.unit_registry.UnitRegistry", "units = words[1:] if len(words) == 2 and words[1] ==", "units[1].is_dimensionless any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False): units", ": yt UnitRegistry, optional A yt unit registry to use", "return YTArray(arr.magnitude, p_units, registry=unit_registry) else: return YTQuantity(arr.magnitude, p_units, registry=unit_registry) def", "a new quantity in. equivalence : string, optional The equivalence", "elif any_nonzero[1] == np.bool_(False): units = (units[0], units[0]) elif not", "string The equivalence you wish to use. To see which", "input_array : :obj:`!iterable` A tuple, list, or array to attach", "If I don't do this, super(YTArray, self).__pow__ returns a YTArray", "as np from distutils.version import LooseVersion from functools import wraps", "return_without_unit, iscomplex: return_without_unit, isfinite: return_without_unit, isinf: return_without_unit, isnan: return_without_unit, signbit:", "See __add__. 
\"\"\" oth = sanitize_units_add(self, other, \"addition\") np.add(self, oth,", "= units[0].is_dimensionless u2d = units[1].is_dimensionless any_nonzero = [np.any(inps[0]), np.any(inps[1])] if", "c = sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere stuff', delimiter=\"\\t\")", "**kwargs) if ret.shape == (): return YTQuantity(ret, units) else: #", "\"\"\" return str(self.view(np.ndarray)) + ' ' + str(self.units) # #", "documentation of numpy.intersect1d for full details. Examples -------- >>> A", "arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis) units = arr1.units * arr2.units", "return input_object def sanitize_units_mul(this_object, other_object): inp = coerce_iterable_units(this_object) ret =", "if unit is None: out_arr = np.array(out_arr, copy=False) elif ufunc", "units = finfo.output_units else: units = finfo.units if isinstance(x, YTArray):", "= line.strip().split() if len(words) == 0: continue if line[0] ==", "for k, v in lut.items() if len(v) == 2]: lut[k]", "other) np.floor_divide(self, oth, out=self) return self def __or__(self, right_object): return", "*inputs, **kwargs): func = getattr(ufunc, method) if 'out' in kwargs:", "well. group_name: string An optional group to read the arrays", "== 'reduce': power_sign = POWER_SIGN_MAPPING[ufunc] if 'axis' in kwargs and", "== 2 and words[1] == \"Units\": next_one = True else:", "the right of the `/` operator. \"\"\" ro = sanitize_units_mul(self,", "the equivalent cgs units, and returns it. Returns ------- Quantity", "def ndarray_view(self): \"\"\" Returns a view into the array, but", "a dimensionless Unit object. if self.units.is_dimensionless and power == -1:", "(numeric_type, np.number, np.ndarray)): raise RuntimeError(\"YTQuantity values must be numeric\") ret", "passthrough_unit, nextafter: preserve_units, modf: passthrough_unit, ldexp: bitop_units, frexp: return_without_unit, floor:", "check that it is compatible with this quantity. 
Returns Unit", "signbit: return_without_unit, copysign: passthrough_unit, nextafter: preserve_units, modf: passthrough_unit, ldexp: bitop_units,", "keyword arguments. The keepdims argument is ignored if the version", "types (%s) and (%s)\" % (cls1, cls2)) def loadtxt(fname, dtype='float',", "by pickle. \"\"\" super(YTArray, self).__setstate__(state[1:]) try: unit, lut = state[0]", "{multiply: 1, divide: -1} # redefine this here to avoid", ".pint_conversions import convert_pint_units NULL_UNIT = Unit() POWER_SIGN_MAPPING = {multiply: 1,", "ufunc is power: unit2 = inp2 if isinstance(unit2, np.ndarray): if", "str. units = Unit(input_units, registry=registry) # Attach the units obj.units", "try the :meth:`list_equivalencies` method. Examples -------- >>> a = yt.YTArray(1.0e7,\"K\")", "as pickle if info is None: info = {} info['units']", "# def write_hdf5(self, filename, dataset_name=None, info=None, group_name=None): r\"\"\"Writes a YTArray", "ufunc, method, *inputs, **kwargs): func = getattr(ufunc, method) if 'out'", "array, but as an ndarray rather than ytarray. Returns -------", "is not power: unit2 = Unit(registry=getattr(unit1, 'registry', None)) elif ufunc", "= sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere stuff', delimiter=\"\\t\") \"\"\"", "the pickle file unit, lut = str(state[0]), default_unit_symbol_lut.copy() # need", "end of the file. comments : str, optional String that", "2., 3., 4.]) cm \"\"\" v = np.union1d(arr1, arr2) v", "dimensions of the result. For example, if ``axis=0`` it will", "into a YTArray. Parameters ---------- filename: string The filename to", "stuff', delimiter=\"\\t\") \"\"\" if not isinstance(arrays, list): arrays = [arrays]", "newer than 1.13 def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): func", "units, equivalence=None, **kwargs): \"\"\" An alias for YTArray.in_units(). 
See the", "this_equiv.convert( self, conv_unit.dimensions, **kwargs) if isinstance(new_arr, tuple): try: return type(self)(new_arr[0],", "array\"\"\" return np.ones_like(self) ua = unit_array def __getitem__(self, item): ret", "a value of 1.0\"\"\" return YTQuantity(1.0, self.units) uq = unit_quantity", "validate_comparison_units( self, other, 'greater than or equal') return super(YTArray, self).__ge__(oth)", "unit**power @lru_cache(maxsize=128, typed=False) def square_unit(unit): return unit*unit @lru_cache(maxsize=128, typed=False) def", "= sanitize_units_mul(self, right_object) return super(YTArray, self).__mul__(ro) def __rmul__(self, left_object): \"\"\"", "self).__pos__(), self.units) @return_arr def dot(self, b, out=None): return super(YTArray, self).dot(b),", "**kwargs) def to(self, units, equivalence=None, **kwargs): \"\"\" An alias for", "to convert from. unit_registry : yt UnitRegistry, optional A yt", "object. dtype : data-type The dtype of the array data.", "around a sympy issue (I think?) # # If I", "for arr, unit in zip(arrays, units)]) def savetxt(fname, arrays, fmt='%.18e',", "ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent))) ap_units = \"*\".join(ap_units) if isinstance(arr.value, np.ndarray):", "\"\"\" dot = np.dot(op1.d, op2.d) units = op1.units*op2.units if dot.shape", "filename: string The filename to of the hdf5 file. 
dataset_name:", "func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs) if unit_operator in (multiply_units, divide_units): out,", "String unit specification, unit symbol object, or astropy units The", "'cm') >>> B = yt.YTArray([2, 3, 4], 'cm') >>> uconcatenate((A,", "% (unit, Rational(pow))) units = \"*\".join(units) return unit_registry.Quantity(self.value, units) #", "3, 4], 'cm') >>> uconcatenate((A, B)) YTArray([ 1., 2., 3.,", "+= '\\n' header += \" Units\\n \" + '\\t'.join(units) np.savetxt(fname,", "not all(isinstance(a, YTArray) for a in arrs): raise RuntimeError(\"Not all", "mylog.info(\"Array units: %s\" % \", \".join(units)) return tuple([YTArray(arr, unit) for", "def return_arr(func): @wraps(func) def wrapped(*args, **kwargs): ret, units = func(*args,", "(%s)\" % (cls1, cls2)) def loadtxt(fname, dtype='float', delimiter='\\t', usecols=None, comments='#'):", "a units attribute, attempt to infer units as well. group_name:", "return wrapped @lru_cache(maxsize=128, typed=False) def sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128, typed=False)", "typed=False) def power_unit(unit, power): return unit**power @lru_cache(maxsize=128, typed=False) def square_unit(unit):", "else: if raise_error: raise YTUfuncUnitError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to(", "lut.update(default_unit_symbol_lut) for k, v in [(k, v) for k, v", "if not isinstance(arrays, list): arrays = [arrays] units = []", "axis=axis) else: norm = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims) if norm.shape", "return super(YTArray, self).__eq__(oth) def __ne__(self, other): \"\"\" Test if this", "as a numpy ndarray\"\"\" return np.array(self) v = value @property", "Parameters ---------- units : Unit object or str The units", "unit2.units.is_dimensionless: pass else: raise YTUnitOperationError(ufunc, unit1, unit2) unit2 = 1.0", "unit1 * unit2 def preserve_units(unit1, unit2=None): return unit1 @lru_cache(maxsize=128, typed=False)", "in the supplied 
units, and returns it without units. Output", "rad2deg, \\ bitwise_and, bitwise_or, bitwise_xor, invert, left_shift, right_shift, \\ greater,", "= getattr(ufunc, method) if 'out' in kwargs: out_orig = kwargs.pop('out')", "of the returned tuple, itself a tuple used to restore", "is True: obj = np.asarray(input_array, dtype=dtype).view(cls) obj.units = input_units if", "Pint UnitRegistry, optional The Pint UnitRegistry to use in the", "size > 1 out_arr = YTArray(np.asarray(out_arr), unit) else: out_arr =", "is not None: units = self.units**self.shape[axis] else: units = self.units**self.size", "power): \"\"\" Raise this YTArray to some power. Parameters ----------", "== units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit = Unit(registry=unit.registry)", "2.0 def __new__(cls, input_array, input_units=None, registry=None, dtype=None, bypass_validation=False): if dtype", "for the standard library pickle module: http://docs.python.org/2/library/pickle.html Unit metadata is", "unit symbol, and check that it is compatible with this", "comparison_unit, logical_and: comparison_unit, logical_or: comparison_unit, logical_xor: comparison_unit, logical_not: return_without_unit, maximum:", "f.close() registry = UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data, units, registry=registry) #", "return self.units = getattr(obj, 'units', NULL_UNIT) def __pos__(self): \"\"\" Posify", "b = YTQuantity(5, 'code_length', registry=ds.unit_registry) >>> np.all(a == b) True", "return data.ds.quan(x, units) def get_binary_op_return_class(cls1, cls2): if cls1 is cls2:", "same number of values. Parameters ---------- fname : str Filename", "') + '}') registry = UnitRegistry(lut=lut, add_default_symbols=False) self.units = Unit(unit,", "self._ufunc_registry[context[0]](u) ret_class = type(self) elif ufunc in binary_operators: unit_operator =", "to read. 
dtype : data-type, optional Data-type of the resulting", "to append as attributes to the dataset. group_name: string An", "if units[0] != units[1]: u1d = units[0].is_dimensionless u2d = units[1].is_dimensionless", "units to the equivalent cgs units. \"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def", "out_arr.size == 1: out_arr = YTQuantity(np.asarray(out_arr), unit) else: if ret_class", "object is a YTArray and has the same dimensions as", "add_default_symbols=False) self.units = Unit(unit, registry=registry) def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation", "issue (I think?) # # If I don't do this,", "to create units from. If input_units is already associated with", "YTArray \"\"\" if equivalence is None: new_units = _unit_repr_check_same(self.units, units)", "self).__rand__(left_object) def __iand__(self, other): np.bitwise_and(self, other, out=self) return self def", ">>> a = YTQuantity(1, 'cm') >>> b = YTQuantity(2, 'm')", "is specified, this will be used instead of the registry", "YTArray filled with ones with the same unit and shape", "= ret.in_units(inp.units) else: # If the other object is not", "a unit attribute set to the sympy expression 1/1 rather", "self).__ne__(oth) def __ge__(self, other): \"\"\" Test if this is greater", "(1,4,5)`` will extract the 2nd, 5th and 6th columns. The", "YTArray to some power. 
Parameters ---------- power : float or", "than 1.13 def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): func =", "in (modf, divmod_): out_arr = tuple((ret_class(o, unit) for o in", "fmax, fmin, \\ isreal, iscomplex, isfinite, isinf, isnan, signbit, copysign,", "out=self) return self def __xor__(self, right_object): return super(YTArray, self).__xor__(right_object) def", "unit_state = (((str(self.units), self.units.registry.lut),) + obj_state[:],) new_ret = np_ret[:2] +", "+ b YTArray([ 401., 502., 603.]) cm >>> b +", "raise RuntimeError(\"YTQuantity values must be numeric\") ret = YTArray.__new__(cls, input_scalar,", "np.bitwise_xor(self, other, out=self) return self def __and__(self, right_object): return super(YTArray,", "exponent in arr._units.items(): bs = convert_pint_units(base) p_units.append(\"%s**(%s)\" % (bs, Rational(exponent)))", "Number as numeric_type from yt.utilities.on_demand_imports import _astropy from sympy import", "wrapper around np.linalg.norm that preserves units. See the documentation for", "of the array data. Defaults to the dtype of the", "raise YTInvalidUnitEquivalence(equiv, self.units, unit) def list_equivalencies(self): \"\"\" Lists the possible", "else: # This could be a subclass, so don't call", "\"Received operand types (%s) and (%s)\" % (cls1, cls2)) def", "keyword arguments are passed to the equivalency, which should be", "this software. #----------------------------------------------------------------------------- import copy import numpy as np from", "raise UnitParseError( \"Code units used without referring to a dataset.", "def dot(self, b, out=None): return super(YTArray, self).dot(b), self.units*b.units def __reduce__(self):", "ret def __repr__(self): return str(self) def validate_numpy_wrapper_units(v, arrs): if not", "as this array and a value of 1.0\"\"\" return YTQuantity(1.0,", "\"\"\" if memodict is None: memodict = {} ret =", "is a YTArray. 
if other is None: # self is", "of the new axis in the dimensions of the result.", "on the right. \"\"\" # Check that other is a", "out_arr=None): inp = inputs[0] u = getattr(inp, 'units', None) if", ") class YTArray(np.ndarray): \"\"\" An ndarray subclass that attaches a", "the same dimensions. .. note:: All additional keyword arguments are", "\"\"\" Check to see if this YTArray or YTQuantity has", "the start of a comment; default: '#'. Examples -------- >>>", "objects handle being multiplied. \"\"\" ro = sanitize_units_mul(self, right_object) return", "__sub__. \"\"\" oth = sanitize_units_add(self, other, \"subtraction\") np.subtract(self, oth, out=self)", "than or equal') return super(YTArray, self).__ge__(oth) def __gt__(self, other): \"\"\"", "details. \"\"\" return self.in_units(units, equivalence=equivalence, **kwargs) def to_value(self, units=None, equivalence=None,", "\"\"\" Test if this is less than the object on", "with # size > 1 return YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr,", "out_arr, out_arr, unit = handle_multiply_divide_units( unit, units, out_arr, out_arr) else:", "next_one = False units = [] num_cols = -1 for", "to two YT arrays. This wrapper around numpy.cross preserves units.", "from pint import UnitRegistry if unit_registry is None: unit_registry =", "preserve_units, isreal: return_without_unit, iscomplex: return_without_unit, isfinite: return_without_unit, isinf: return_without_unit, isnan:", "__abs__(self): \"\"\" Return a YTArray with the abs of the", "that preserves units. \"\"\" v = np.stack(arrs) v = validate_numpy_wrapper_units(v,", "the same units. See the documentation of numpy.concatenate for full", "other is a YTArray. oth = validate_comparison_units(self, other, 'greater than')", "Creates a new AstroPy quantity with the same unit information.", "of the registry associated with the unit object. 
dtype :", "\"\"\"Pickle reduction method See the documentation for the standard library", "self).__le__(oth) def __eq__(self, other): \"\"\" Test if this is equal", "this is not equal to the object on the right.", "unit if out_arr.size == 1: return YTQuantity(np.array(out_arr), unit) else: if", "with the data in the supplied units, and returns it", "= (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_comparison_units(inps, units,", "handle special case of adding or subtracting with zero or", "expm1: return_without_unit, log1p: return_without_unit, sqrt: sqrt_unit, square: square_unit, reciprocal: reciprocal_unit,", "super(YTArray, self).__getitem__(item) if ret.shape == (): return YTQuantity(ret, self.units, bypass_validation=True)", "= unit_operator(*units) if unit_operator in (multiply_units, divide_units): out_arr, out_arr, unit", "them. >>> np.log10(a) array([ -inf, 0. , 0.30103 , 0.47712125,", "raise RuntimeError(\"Undefined operation for a YTArray subclass. \" \"Received operand", "of the data in the iterable. return YTArray(input_object) return input_object", "== (): return YTQuantity(norm, data.units) return YTArray(norm, data.units) def udot(op1,", "returns it. Returns ------- Quantity object with data converted to", "The name of the dataset to read from. If the", "= getattr(inp2, 'units', None) ret_class = get_binary_op_return_class(type(inp1), type(inp2)) if unit1", "before we use the `units` # attribute. if isinstance(ret, YTArray):", "= units elif isinstance(input_units, Unit): ret.units = input_units else: ret.units", "you can't convert to \" + \"an AstroPy quantity.\") return", "fix up the lut if the pickle was saved prior", "returns it. Optionally, an equivalence can be specified to convert", "trunc: passthrough_unit, spacing: passthrough_unit, positive: passthrough_unit, divmod_: passthrough_unit, isnat: return_without_unit,", "is equal to the object on the right. 
\"\"\" #", "ufunc, ret_class) unit = unit_operator(*units) out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]), out=out,", "inp2 if isinstance(unit2, np.ndarray): if isinstance(unit2, YTArray): if unit2.units.is_dimensionless: pass", "out_arr = np.array(out_arr, copy=False) return out_arr out_arr.units = unit if", "wrapper around np.stack that preserves units. \"\"\" v = np.stack(arrs)", "Start convenience methods # @property def value(self): \"\"\"Get a copy", "stripped \"\"\" return np.array(self) @classmethod def from_astropy(cls, arr, unit_registry=None): \"\"\"", "to use in the conversion. If one is not supplied,", "inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in (comparison_unit,", "return YTArray(arr.value, ap_units, registry=unit_registry) else: return YTQuantity(arr.value, ap_units, registry=unit_registry) def", ">>> import numpy as np >>> ureg = UnitRegistry() >>>", "g = f[group_name] else: g = f.create_group(group_name) else: g =", "elements of the two input arrays. A wrapper around numpy.intersect1d", "away with it. if d.shape == self.shape and d.dtype ==", "and hasattr(self, 'units'): return self.units = getattr(obj, 'units', NULL_UNIT) def", "number of values. Parameters ---------- fname : str Filename to", "3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm This is equivalent to: >>>", "NULL_UNIT if u.dimensions is angle and ufunc in trigonometric_operators: inp", "input_array, input_units=None, registry=None, dtype=None, bypass_validation=False): if dtype is None: dtype", "example, ``usecols = (1,4,5)`` will extract the 2nd, 5th and", "ImportError: positive, divmod_, isnat, heaviside = (None,)*4 from yt.units.unit_object import", "pint import UnitRegistry >>> import numpy as np >>> ureg", "associated with a unit. 
Parameters ---------- input_scalar : an integer", "YTArray([ 4.01, 5.02, 6.03]) m NumPy ufuncs will pass through", "def power_unit(unit, power): return unit**power @lru_cache(maxsize=128, typed=False) def square_unit(unit): return", "you do ndarray * YTQuantity. Explicitly # casting to YTArray", "'particle': units = finfo.output_units else: units = finfo.units if isinstance(x,", "-------- >>> sp = ds.sphere(\"c\", (100,\"kpc\")) >>> a = sp[\"density\"]", "np.intersect1d(arr1, arr2, assume_unique=assume_unique) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v", "ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__mul__(ro) def __rmul__(self, left_object):", "= g[dataset_name] # Overwrite without deleting if we can get", "# # Distributed under the terms of the Modified BSD", "state[0] except TypeError: # this case happens when we try", "left_object) return super(YTArray, self).__rtruediv__(lo) def __itruediv__(self, other): \"\"\" See __div__.", "first dimension and if ``axis=-1`` it will be the last", "divmod_, isnat, heaviside = (None,)*4 from yt.units.unit_object import Unit, UnitParseError", "if ufunc in unary_operators: out_arr, inp, u = get_inp_u_unary(ufunc, inputs,", "version of numpy installed is older than numpy 1.10.0. \"\"\"", "return v def ustack(arrs, axis=0): \"\"\"Join a sequence of arrays", ">>> b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>> np.all(a == b)", "not the same as a yt UnitRegistry object. Examples --------", "# Start comparison operators. 
# def __lt__(self, other): \"\"\" Test", "dtype=dtype), input_array[0].units, registry=registry) # Input array is an already formed", "'cm') >>> b = YTQuantity(2, 'm') >>> a + b", "units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview * conversion_factor,", "dimensionless, \\ em_dimensions from yt.utilities.exceptions import \\ YTUnitOperationError, YTUnitConversionError, \\", "# Check that the other is a YTArray. if other", "unit system, and returns it in that system's base units.", "returned in the current units. equivalence : string, optional The", "= Unit(input_units, registry=registry) return ret elif isinstance(input_array, np.ndarray): pass elif", "the sorted unique elements of the two input arrays. A", "string The filename to of the hdf5 file. dataset_name: string", "\"\"\" if not isinstance(arrays, list): arrays = [arrays] units =", "self).__add__(ro) def __radd__(self, left_object): \"\"\" See __add__. \"\"\" lo =", "return inps, units def handle_multiply_divide_units(unit, units, out, out_arr): if unit.is_dimensionless", "Parameters ---------- power : float or dimensionless YTArray. The pow", "of 1.0\"\"\" return YTQuantity(1.0, self.units) uq = unit_quantity @property def", "\"\"\" Add this ytarray to the object on the right", "the cases here, let the Unit class handle if #", "to separate values. By default, this is any whitespace. usecols", "return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units, equivalence=None, **kwargs): \"\"\" Creates a", "(): return YTQuantity(ret, units) else: # This could be a", "data-type The dtype of the array data. Defaults to the", ": str, optional The string used to separate values. 
By", "subtract: preserve_units, multiply: multiply_units, divide: divide_units, logaddexp: return_without_unit, logaddexp2: return_without_unit,", "< LooseVersion('1.13.0'): def __add__(self, right_object): \"\"\" Add this ytarray to", "# \"yr\" as \"year\" if str(unit).endswith(\"yr\") and len(str(unit)) in [2,3]:", "will be returned in the current units. equivalence : string,", "a1.units return v def uconcatenate(arrs, axis=0): \"\"\"Concatenate a sequence of", "type if input_units is None: # Nothing provided. Make dimensionless...", "None: obj.units.registry = registry return obj if input_array is NotImplemented:", "of arrays along a new axis while preserving units The", "the unit symbol lookup table # into the pickle file", "Returns ------- Quantity object with data converted to mks units.", "if isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret def sanitize_units_add(this_object,", "\"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units, equivalence=None, **kwargs): \"\"\" Creates", "units.append(\"%s**(%s)\" % (unit, Rational(pow))) units = \"*\".join(units) return unit_registry.Quantity(self.value, units)", "a = YTArray([1, 2, 3], 'cm') >>> b = YTArray([4,", "np >>> a = YTQuantity(12, 'g/cm**3') >>> np.abs(a) 12 g/cm**3", "isfinite, isinf, isnan, signbit, floor, ceil, trunc, modf, frexp, fabs,", "v def uunion1d(arr1, arr2): \"\"\"Find the union of two arrays.", "\\ bitwise_and, bitwise_or, bitwise_xor, invert, left_shift, right_shift, \\ greater, greater_equal,", "the new axis in the dimensions of the result. For", "ret.shape == (): return YTQuantity(ret, self.units, bypass_validation=True) else: if hasattr(self,", "return self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\" Returns a view into the", "uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find the sorted unique elements of the", "that other is a YTArray. 
if other is None: #", "arr, unit_registry=None): \"\"\" Convert an AstroPy \"Quantity\" to a YTArray", "ret = coerce_iterable_units(other_object) # If the other object is a", "YTQuantity >>> a = YTQuantity(1, 'cm') >>> b = YTQuantity(2,", "skipped. Using this option may produce corrupted, invalid units or", "units = Unit(str(input_units), registry=registry) else: units = input_units else: #", "# attribute. if isinstance(ret, YTArray): if not inp.units.same_dimensions_as(ret.units): # handle", "for that function for descriptions of the keyword arguments. The", "is greater than or equal to other. \"\"\" # Check", ") binary_operators = ( add, subtract, multiply, divide, logaddexp, logaddexp2,", "if other is None: # self is a YTArray, so", "is None: info = {} info['units'] = str(self.units) info['unit_registry'] =", "self.units @return_arr def std(self, axis=None, dtype=None, out=None, ddof=0): return super(YTArray,", "return YTArray(input_object) return input_object else: return input_object def sanitize_units_mul(this_object, other_object):", "union of two arrays. A wrapper around numpy.intersect1d that preserves", "logical_or: comparison_unit, logical_xor: comparison_unit, logical_not: return_without_unit, maximum: preserve_units, minimum: preserve_units,", "unit_str == \"h\": unit_str = \"hr\" ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent)))", "---------- arr : YTArray or YTQuantity The unitful quantity to", "ufunc, ret_class, raise_error=False): if units[0] != units[1]: u1d = units[0].is_dimensionless", "out.flat[:] if isinstance(out_orig[0], YTArray): out_orig[0].units = unit return out_arr def", "object, or string of known unit symbol, and check that", "arrays are datasets at the top level by default. 
\"\"\"", "has a units attribute, attempt to infer units as well.", "trigonometric_operators: inp = inp.in_units('radian').v if out_arr is not None: out_arr", "units = (units[1], units[1]) elif any_nonzero[1] == np.bool_(False): units =", "array and units to the equivalent mks units. \"\"\" return", "base.to_string() # we have to do this because AstroPy is", "By default, this is any whitespace. usecols : sequence, optional", "on the right. \"\"\" # Check that the other is", "preserve_units, fmin: preserve_units, isreal: return_without_unit, iscomplex: return_without_unit, isfinite: return_without_unit, isinf:", "subtract, multiply, divide, logaddexp, logaddexp2, true_divide, \\ floor_divide, negative, power,", "conv_unit.dimensions, **kwargs) if isinstance(new_arr, tuple): try: return type(self)(new_arr[0], new_arr[1]).in_units(unit) except", "This will create a copy of the data in the", "Check that other is a YTArray. if other is None:", "else: units.append(\"dimensionless\") if header != '': header += '\\n' header", "wish to convert to. equiv : string The equivalence you", "the object on the right. \"\"\" # Check that other", "h5py from yt.extern.six.moves import cPickle as pickle if info is", "get_inp_u_binary(ufunc, inputs) if unit_operator in (preserve_units, comparison_unit, arctan2_unit): inps, units", "start of a comment; default: '#'. Examples -------- >>> temp,", "unit.base_value, out=out) unit = Unit(registry=unit.registry) return out, out_arr, unit def", "= getattr(obj, 'units', NULL_UNIT) def __pos__(self): \"\"\" Posify the data.", "def __ixor__(self, other): np.bitwise_xor(self, other, out=self) return self def __and__(self,", "numpy.cross for full details. 
\"\"\" v = np.cross(arr1, arr2, axisa=axisa,", "data from the metadata extracted in __reduce__ and then serialized", "[2,3]: unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit, Rational(pow))) units =", "registry return obj if input_array is NotImplemented: return input_array.view(cls) if", "PR #1728 # when the pickle format changed if len(lut['m'])", "obj if input_array is NotImplemented: return input_array.view(cls) if registry is", "self.units.list_equivalencies() def has_equivalent(self, equiv): \"\"\" Check to see if this", "a1 = arrs[0] if not all(a.units == a1.units for a", "out, out_arr, unit def coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray): return input_object", "super(YTArray, self).__neg__() def __mul__(self, right_object): \"\"\" Multiply this YTArray by", "== 2: lut.update(default_unit_symbol_lut) for k, v in [(k, v) for", "elif isinstance(input_units, Unit): if registry and registry is not input_units.registry:", "ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_multiply_divide_units(unit, units, out, out_arr):", "sanitize_units_add(self, other, \"subtraction\") np.subtract(self, oth, out=self) return self def __neg__(self):", "right. 
\"\"\" # converts if possible oth = validate_comparison_units(self, other,", "= sanitize_units_mul(self, right_object) return super(YTArray, self).__div__(ro) def __rdiv__(self, left_object): \"\"\"", "symbol, and check that it is compatible with this quantity.", "np.array(self) @classmethod def from_astropy(cls, arr, unit_registry=None): \"\"\" Convert an AstroPy", "__setstate__(self, state): \"\"\"Pickle setstate method This is called inside pickle.read()", "sp = ds.sphere(\"c\", (100,\"kpc\")) >>> a = sp[\"density\"] >>> b", "list_equivalencies(self): \"\"\" Lists the possible equivalencies associated with this YTArray", "None: return True oth = validate_comparison_units(self, other, 'not equal') return", "\"\"\" v = np.hstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v", "to the ``header`` and ``footer`` strings, to mark them as", "array to attach units to input_units : String unit specification,", "pickle was saved prior to PR #1728 # when the", "this is greater than or equal to other. \"\"\" #", "is a wrapper around np.dot that preserves units. \"\"\" dot", "in the specified unit system, and returns it in that", "-------- >>> a = YTQuantity(4.0, \"cm**2/s\") >>> b = a.to_pint()", "def get_binary_op_return_class(cls1, cls2): if cls1 is cls2: return cls1 if", "mylog from .pint_conversions import convert_pint_units NULL_UNIT = Unit() POWER_SIGN_MAPPING =", "is None: out_arr = np.array(out_arr, copy=False) elif ufunc in (modf,", "1: _, inp, u = get_inp_u_unary(ufunc, inputs) out_arr = func(np.asarray(inp),", "usecols] mylog.info(\"Array units: %s\" % \", \".join(units)) return tuple([YTArray(arr, unit)", "to: >>> b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>> np.all(a ==", "3., 4.]) cm \"\"\" v = np.concatenate(arrs, axis=axis) v =", "return_without_unit, bitwise_and: bitop_units, bitwise_or: bitop_units, bitwise_xor: bitop_units, invert: invert_units, left_shift:", "The filename to of the hdf5 file. 
dataset_name: string The", "of the Modified BSD License. # # The full license", "# This will create a copy of the data in", "right_shift, \\ greater, greater_equal, less, less_equal, not_equal, equal, logical_and, \\", "return_without_unit, sign: return_without_unit, conj: passthrough_unit, exp: return_without_unit, exp2: return_without_unit, log:", "you meant to do something like this instead: \\n\" \"ds.arr(%s,", "elif not np.any(this_object): return ret raise YTUnitOperationError(op_string, inp.units, ret.units) ret", "data.ds._get_field_info(field) if finfo.sampling_type == 'particle': units = finfo.output_units else: units", "input_array.units else: units = Unit(str(input_array.units), registry=registry) ret.units = units elif", "the same dimensions as the object # under consideration, convert", "import numpy as np >>> a = YTQuantity(12, 'g/cm**3') >>>", "comparison_unit, logical_not: return_without_unit, maximum: preserve_units, minimum: preserve_units, fmax: preserve_units, fmin:", "2]: lut[k] = v + (0.0, r'\\rm{' + k.replace('_', '\\", "= {'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... 
info=myinfo) \"\"\" from", "current_mks, \\ dimensionless, \\ em_dimensions from yt.utilities.exceptions import \\ YTUnitOperationError,", "def in_mks(self): \"\"\" Creates a copy of this array with", "return input_object else: return input_object def sanitize_units_mul(this_object, other_object): inp =", "== a1.units for a in arrs[1:]): raise RuntimeError(\"Your arrays must", "logical_not, maximum, minimum, fmax, fmin, \\ isreal, iscomplex, isfinite, isinf,", "' + str(self.units) # # Start unit conversion methods #", "= handle_preserve_units( inps, units, ufunc, ret_class) unit = unit_operator(*units) out_arr", "YTArray([1,2,3], 'cm') >>> myinfo = {'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs',", "a = ds.quan(5, 'code_length') >>> a.in_cgs() 1.543e+25 cm This is", "Filename to read. dtype : data-type, optional Data-type of the", "supplementary info to write to append as attributes to the", "YTArray by the object on the right of the `/`", "\\ floor_divide, negative, power, remainder, mod, absolute, rint, \\ sign,", "ufunc(inp).view(np.ndarray) return out_arr, inp, u def get_inp_u_binary(ufunc, inputs): inp1 =", "if LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm = np.linalg.norm(data, ord=ord, axis=axis) else:", "b YTArray([ 401., 502., 603.]) cm >>> b + a", "call YTArray directly. return type(args[0])(ret, units) return wrapped @lru_cache(maxsize=128, typed=False)", "is a YTArray. oth = validate_comparison_units( self, other, 'greater than", "# it's a str. units = Unit(input_units, registry=registry) # Attach", "of the array data. Examples -------- >>> from yt import", "dimensionless) return ret def validate_comparison_units(this, other, op_string): # Check that", "np.union1d(arr1, arr2) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v def", "offset: np.subtract(self, offset*self.uq, self) return self def convert_to_base(self, unit_system=\"cgs\"): \"\"\"", "significant overhead. 
If set, input_units *must* be a valid unit", "a sequence of arrays along a new axis while preserving", "uses np.float64 bypass_validation : boolean If True, all input validation", "is None: # Nothing provided. Make dimensionless... units = Unit()", "units. \"\"\" return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\" Creates a copy", "all of your arrays are YTArrays.\") a1 = arrs[0] if", "as an ndarray rather than ytarray. Returns ------- View of", "an equivalent, e.g., something that is related by only a", "optional Data-type of the resulting array; default: float. delimiter :", "specified using python syntax (cm**3, not cm^3). registry : A", "sequence of arrays along a new axis while preserving units", "def return_without_unit(unit, unit2=None): return None def arctan2_unit(unit1, unit2): return NULL_UNIT", "num_cols = -1 for line in f.readlines(): words = line.strip().split()", "to infer units as well. group_name: string An optional group", "a circular import from yt.funcs def iterable(obj): try: len(obj) except:", "method This is called inside pickle.read() and restores the unit", "get_binary_op_return_class(cls1, cls2): if cls1 is cls2: return cls1 if cls1", "self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\" Convert the array and units to", "def write_hdf5(self, filename, dataset_name=None, info=None, group_name=None): r\"\"\"Writes a YTArray to", "registry, dtype=dtype, bypass_validation=bypass_validation) if ret.size > 1: raise RuntimeError(\"YTQuantity instances", "self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\" Returns a view into the array,", "isinstance(self, YTQuantity): return float(v) else: return v def in_base(self, unit_system=\"cgs\"):", "new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv, self.units, unit) def list_equivalencies(self): \"\"\" Lists", "or YTQuantity has an equivalent unit in *equiv*. 
\"\"\" return", "cls1 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls1, (numeric_type, np.number, list,", "NOTE: This is not the same as a yt UnitRegistry", "is None: v = self.value else: v = self.in_units(units, equivalence=equivalence,", "this array with the data in the equivalent mks units,", "# Here we catch the first line of numbers try:", "out_arr, out_arr) else: raise RuntimeError( \"Support for the %s ufunc", "= self.units**self.shape[axis] else: units = self.units**self.size return super(YTArray, self).prod(axis, dtype,", "Each row in the text file must have the same", "with the data in the equivalent mks units, and returns", "return None def arctan2_unit(unit1, unit2): return NULL_UNIT def comparison_unit(unit1, unit2=None):", "super(YTArray, self).__sub__(ro) def __rsub__(self, left_object): \"\"\" See __sub__. \"\"\" lo", "object is a YTArray before we use the `units` #", "RuntimeError(\"Undefined operation for a YTArray subclass. \" \"Received operand types", "of the file, before the unit header. footer : str,", "copysign: passthrough_unit, nextafter: preserve_units, modf: passthrough_unit, ldexp: bitop_units, frexp: return_without_unit,", "Unit, UnitParseError from yt.units.unit_registry import UnitRegistry from yt.units.dimensions import \\", "append as attributes to the dataset. group_name: string An optional", "or YTQuantity. Parameters ---------- arr : Pint Quantity The Quantity", "return other.in_units(this.units) return other @lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units, other_units): \"\"\"", "# handle special case of adding or subtracting with zero", "operator. \"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__div__(ro) def", "file. 
Each row in the text file must have the", "is not None: out_orig[0].flat[:] = out.flat[:] if isinstance(out_orig[0], YTArray): out_orig[0].units", "preserve_units, fmod: preserve_units, absolute: passthrough_unit, fabs: passthrough_unit, rint: return_without_unit, sign:", "import yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.quan(5,", "self.in_units(units, equivalence=equivalence, **kwargs).value if isinstance(self, YTQuantity): return float(v) else: return", "= type(self) elif len(inputs) == 2: unit_operator = self._ufunc_registry[ufunc] inps,", "= Unit(unit, registry=registry) def __deepcopy__(self, memodict=None): \"\"\"copy.deepcopy implementation This is", "rather than ytarray. Returns ------- View of this array's data.", "arr2) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v def unorm(data,", "oth = validate_comparison_units(self, other, 'less_than') return super(YTArray, self).__lt__(oth) def __le__(self,", "= ufunc(inp).view(np.ndarray) return out_arr, inp, u def get_inp_u_binary(ufunc, inputs): inp1", "validate_comparison_units(this, other, op_string): # Check that other is a YTArray.", "zeros if not inp.units.is_dimensionless and np.any(ret): raise YTUnitOperationError(op_string, inp.units, dimensionless)", "!= units[1]: any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False):", "v def ustack(arrs, axis=0): \"\"\"Join a sequence of arrays along", "= validate_comparison_units(self, other, 'less_than') return super(YTArray, self).__lt__(oth) def __le__(self, other):", "# this case happens when we try to load an", "return super(YTArray, self).__abs__() # # Start comparison operators. 
# def", "from yt.utilities.on_demand_imports import _h5py as h5py from yt.extern.six.moves import cPickle", "axis=None, keepdims=False): \"\"\"Matrix or vector norm that preserves units This", "isinstance(power, YTArray): if not power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit) # Work", "cls2): if cls1 is cls2: return cls1 if cls1 in", "in powers_dict.items(): # we have to do this because Pint", "The registry to create units from. If input_units is already", "dtype='float', delimiter='\\t', usecols=None, comments='#'): r\"\"\" Load YTArrays with unit information", "units in the specified unit system. Parameters ---------- unit_system :", "arctanh, deg2rad, rad2deg, \\ bitwise_and, bitwise_or, bitwise_xor, invert, left_shift, right_shift,", "from yt.units.equivalencies import equivalence_registry from yt.utilities.logger import ytLogger as mylog", "f.create_group(group_name) else: g = f if dataset_name in g.keys(): d", "else: g = f dataset = g[dataset_name] data = dataset[:]", "sequence of strs, optional A single format (%10.5f), or a", "ret.shape == (): return YTQuantity(ret, units) else: # This could", "if registry is None: ret.units = input_array.units else: units =", "1 out_arr = YTArray(np.asarray(out_arr), unit) else: out_arr = ret_class(np.asarray(out_arr), unit)", "is None and hasattr(self, 'units'): return self.units = getattr(obj, 'units',", "unit2=None): return unit1 @lru_cache(maxsize=128, typed=False) def power_unit(unit, power): return unit**power", "\"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\" Convert", "# Work around a sympy issue (I think?) 
# #", "registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]() oneway_or_equivalent =", "__le__(self, other): \"\"\"Test if this is less than or equal", "set, input_units *must* be a valid unit object. Defaults to", "iscomplex: return_without_unit, isfinite: return_without_unit, isinf: return_without_unit, isnan: return_without_unit, signbit: return_without_unit,", "a Pint \"Quantity\" to a YTArray or YTQuantity. Parameters ----------", "logical_not: return_without_unit, maximum: preserve_units, minimum: preserve_units, fmax: preserve_units, fmin: preserve_units,", "1.0\"\"\" return YTQuantity(1.0, self.units) uq = unit_quantity @property def unit_array(self):", "unit, units, out_arr, out_arr) else: raise RuntimeError( \"Support for the", "the possible equivalencies associated with this YTArray or YTQuantity. \"\"\"", "other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.true_divide(self,", "to restore the state of the ndarray. This is always", "\"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other) np.divide(self, oth,", "current_mks in equiv_dims.free_symbols: base = \"SI\" else: base = \"CGS\"", "__isub__(self, other): \"\"\" See __sub__. \"\"\" oth = sanitize_units_add(self, other,", "return_without_unit, isfinite: return_without_unit, isinf: return_without_unit, isnan: return_without_unit, signbit: return_without_unit, copysign:", "right_object) return super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self, left_object): \"\"\" See __div__.", "base units of cgs are used. Examples -------- >>> E", "# casting to YTArray avoids creating a YTQuantity with #", "self.units.get_conversion_factor(new_units) new_array = type(self)(self.ndview * conversion_factor, new_units) if offset: np.subtract(new_array,", "Rational(pow))) units = \"*\".join(units) return unit_registry.Quantity(self.value, units) # # End", "appropriate. 
>>> import numpy as np >>> a = YTArray(np.arange(8)", "dimension) units. \"\"\" ro = sanitize_units_add(self, right_object, \"addition\") return super(YTArray,", "all numpy versions, see # numpy issue #9081 return type(self)(super(YTArray,", "to do this because Pint doesn't recognize # \"yr\" as", "optional A yt unit registry to use in the conversion.", "__iadd__(self, other): \"\"\" See __add__. \"\"\" oth = sanitize_units_add(self, other,", "first line of numbers try: col_words = line.strip().split(delimiter) for word", "header='', footer='', comments='#'): r\"\"\" Write YTArrays with unit information to", "in info.items(): d.attrs[k] = v f.close() @classmethod def from_hdf5(cls, filename,", "logaddexp2: return_without_unit, true_divide: divide_units, floor_divide: divide_units, negative: passthrough_unit, power: power_unit,", "numpy.cross preserves units. See the documentation of numpy.cross for full", "file to write the YTArrays to. arrays : list of", "self).__setstate__(state[1:]) try: unit, lut = state[0] except TypeError: # this", "object on the right of the `*` operator. The unit", "the :meth:`list_equivalencies` method. Default: None Returns ------- NumPy array \"\"\"", "cls2 if issubclass(cls2, YTQuantity): return cls1 if issubclass(cls1, cls2): return", "bitwise_or, bitwise_xor, invert, left_shift, right_shift, \\ greater, greater_equal, less, less_equal,", "divmod as divmod_, isnat, heaviside except ImportError: positive, divmod_, isnat,", "Unit(str(input_units), registry=registry) else: units = input_units else: # units kwarg", "dtype=None, out=None): return super(YTArray, self).mean(axis, dtype, out), self.units @return_arr def", "np.any(inps[1])] if any_nonzero[0] == np.bool_(False): units = (units[1], units[1]) elif", "Development Team. # # Distributed under the terms of the", "of values. 
Parameters ---------- fname : str Filename to read.", "def sanitize_units_mul(this_object, other_object): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) #", "= coerce_iterable_units(other_object) # Make sure the other object is a", "log10, expm1, log1p, sqrt, square, reciprocal, sin, cos, tan, arcsin,", "coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray): return input_object if iterable(input_object): if any([isinstance(o,", "RuntimeError(\"Your arrays must have identical units.\") v.units = a1.units return", "write the arrays to. If not specified, the arrays are", "equivalence_registry from yt.utilities.logger import ytLogger as mylog from .pint_conversions import", "super(YTArray, self).__rsub__(lo) def __isub__(self, other): \"\"\" See __sub__. \"\"\" oth", "self def __div__(self, right_object): \"\"\" Divide this YTArray by the", "think?) # # If I don't do this, super(YTArray, self).__pow__", "sanitize_units_mul(self, right_object) return super(YTArray, self).__truediv__(ro) def __rtruediv__(self, left_object): \"\"\" See", "pickle file unit, lut = str(state[0]), default_unit_symbol_lut.copy() # need to", "\"erg/s\") >>> E_new = E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self):", "__xor__(self, right_object): return super(YTArray, self).__xor__(right_object) def __rxor__(self, left_object): return super(YTArray,", "\"subtraction\") return super(YTArray, self).__sub__(ro) def __rsub__(self, left_object): \"\"\" See __sub__.", "out_arr)) elif out_arr.size == 1: out_arr = YTQuantity(np.asarray(out_arr), unit) else:", "= YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\") \"\"\" return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self):", "units. 
\"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units, equivalence=None, **kwargs): \"\"\"", "of the dataset to create in the file. info: dictionary", "np.ndarray): return YTArray(arr.magnitude, p_units, registry=unit_registry) else: return YTQuantity(arr.magnitude, p_units, registry=unit_registry)", "6th columns. The default, None, results in all columns being", "= handle_comparison_units( inps, units, ufunc, ret_class) elif unit_operator is preserve_units:", "input_object]): ff = getattr(input_object[0], 'units', NULL_UNIT, ) if any([ff !=", "default_unit_symbol_lut.copy() # need to fix up the lut if the", "cm >>> b + a 2.01 m NumPy ufuncs will", "The unit system to be used in the conversion. If", "to the dataset. group_name: string An optional group to write", "of strs, optional A single format (%10.5f), or a sequence", "to read the arrays from. If not specified, the arrays", "# This could be a subclass, so don't call YTArray", "len(words) == 2 and words[1] == \"Units\": next_one = True", "copy of this array with the unit information stripped \"\"\"", "string An optional group to write the arrays to. If", "to. 
If not specified, the arrays are datasets at the", "arr1.units * arr2.units arr = YTArray(v, units, registry=registry) return arr", "= [] for array in arrays: if hasattr(array, \"units\"): units.append(str(array.units))", "equal, logical_and, \\ logical_or, logical_xor, logical_not, maximum, minimum, fmax, fmin,", "dot product that preserves units This is a wrapper around", "g[dataset_name] # Overwrite without deleting if we can get away", "\"\"\" Takes a Unit object, or string of known unit", "ap_units, registry=unit_registry) else: return YTQuantity(arr.value, ap_units, registry=unit_registry) def to_astropy(self, **kwargs):", "registry=None, axisa=-1, axisb=-1, axisc=-1, axis=None): \"\"\"Applies the cross product to", "arctanh, deg2rad, rad2deg, invert, logical_not, isreal, iscomplex, isfinite, isinf, isnan,", "right_object): \"\"\" Multiply this YTArray by the object on the", "if bypass_validation is True: obj = np.asarray(input_array, dtype=dtype).view(cls) obj.units =", "ret.units = input_units else: ret.units = Unit(input_units, registry=registry) return ret", "is an already formed ndarray instance # We first cast", "other is None: return True oth = validate_comparison_units(self, other, 'not", "the file. info: dictionary A dictionary of supplementary info to", "\\ isreal, iscomplex, isfinite, isinf, isnan, signbit, copysign, nextafter, \\", "wish to use. To see which equivalencies are supported for", "== (): return YTQuantity(ret, self.units, bypass_validation=True) else: if hasattr(self, 'units'):", "out_arr = tuple((ret_class(o, unit) for o in out_arr)) elif out_arr.size", "full license is in the file COPYING.txt, distributed with this", "get_inp_u_unary(ufunc, inputs) out_arr = func(np.asarray(inp), out=out, **kwargs) if ufunc in", "for YTArray instances\") def get_inp_u_unary(ufunc, inputs, out_arr=None): inp = inputs[0]", "supplied, the default one will be used. 
\"\"\" # Converting", "if str(unit).endswith(\"yr\") and len(str(unit)) in [2,3]: unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\"", "along a new axis while preserving units The axis parameter", "self.units**self.size return super(YTArray, self).prod(axis, dtype, out), units @return_arr def mean(self,", "comparison_unit(unit1, unit2=None): return None def invert_units(unit): raise TypeError( \"Bit-twiddling operators", "# Check that the other is a YTArray. oth =", "inputs, out_arr=None): inp = inputs[0] u = getattr(inp, 'units', None)", "input_object else: return input_object def sanitize_units_mul(this_object, other_object): inp = coerce_iterable_units(this_object)", "units. Output is therefore a bare NumPy array. Optionally, an", "= (((str(self.units), self.units.registry.lut),) + obj_state[:],) new_ret = np_ret[:2] + unit_state", "\"\"\" from pint import UnitRegistry if unit_registry is None: unit_registry", "ap_units = [] for base, exponent in zip(u.bases, u.powers): unit_str", "lut[k] = v + (0.0, r'\\rm{' + k.replace('_', '\\ ')", "positive, divmod as divmod_, isnat, heaviside except ImportError: positive, divmod_,", "preserving units This is a wrapper around np.hstack that preserves", "np.all(a == b) True \"\"\" _ufunc_registry = { add: preserve_units,", "Output is therefore a bare NumPy array. Optionally, an equivalence", "e.g. ``yt.loadtxt``. Examples -------- >>> sp = ds.sphere(\"c\", (100,\"kpc\")) >>>", "(row wise) while preserving units This is a wrapper around", "name of the dataset to read from. If the dataset", "is a YTArray. oth = validate_comparison_units(self, other, 'greater than') return", "is not supplied, the default one will be used. \"\"\"", "multiply_units(unit1, unit2): return unit1 * unit2 def preserve_units(unit1, unit2=None): return", "unit conversion methods # def convert_to_units(self, units): \"\"\" Convert the", ":meth:`list_equivalencies` method. 
Default: None Returns ------- YTArray \"\"\" if equivalence", "in [2,3]: unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit, Rational(pow))) units", "data-type The dtype of the array data. Examples -------- >>>", "u def get_inp_u_binary(ufunc, inputs): inp1 = coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1])", "file. info: dictionary A dictionary of supplementary info to write", "None Returns ------- YTArray \"\"\" if equivalence is None: new_units", "arctan2: arctan2_unit, arcsinh: return_without_unit, arccosh: return_without_unit, arctanh: return_without_unit, hypot: preserve_units,", "def __rsub__(self, left_object): \"\"\" See __sub__. \"\"\" lo = sanitize_units_add(self,", "finfo = data.ds._get_field_info(field) if finfo.sampling_type == 'particle': units = finfo.output_units", "of the two input arrays. A wrapper around numpy.intersect1d that", "len(units) != num_cols: mylog.warning(\"Malformed or incomplete units header. Arrays will", "sequence, optional Which columns to read, with 0 being the", "for k, v in [(k, v) for k, v in", "lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rmul__(lo) def __imul__(self, other):", "is None: ret.units = input_array.units else: units = Unit(str(input_array.units), registry=registry)", "return self def __xor__(self, right_object): return super(YTArray, self).__xor__(right_object) def __rxor__(self,", "dataset in an hdf5 file into a YTArray. Parameters ----------", "data. \"\"\" # this needs to be defined for all", "= \"SI\" else: base = \"CGS\" raise YTEquivalentDimsError(my_units, other_units, base)", "raise_error=True) unit = unit_operator(*units) if unit_operator in (multiply_units, divide_units): out_arr,", "of numpy installed is older than numpy 1.10.0. \"\"\" if", "numbers try: col_words = line.strip().split(delimiter) for word in col_words: float(word)", "else: out = None if len(inputs) == 1: _, inp,", "``axis=-1`` it will be the last dimension. 
This is a", "def __lt__(self, other): \"\"\" Test if this is less than", "data = dataset[:] units = dataset.attrs.get('units', '') if 'unit_registry' in", "else: ret.units = Unit(input_units, registry=registry) return ret elif isinstance(input_array, np.ndarray):", "dtype=None, bypass_validation=False): if dtype is None: dtype = getattr(input_array, 'dtype',", "'# ', as expected by e.g. ``yt.loadtxt``. Examples -------- >>>", "isinstance(input_units, Unit): if registry and registry is not input_units.registry: units", "See __mul__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rmul__(lo)", "Which columns to read, with 0 being the first. For", "(np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls1, (numeric_type, np.number, list, tuple)): return", "used to restore the state of the ndarray. This is", "a wrapper around np.dot that preserves units. \"\"\" dot =", "= self.units.get_conversion_factor(new_units) self.units = new_units values = self.d values *=", "not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units))", "with them. >>> print(np.log10(a)) 1.07918124605 YTQuantity is tightly integrated with", "= np.linalg.norm(data, ord=ord, axis=axis) else: norm = np.linalg.norm(data, ord=ord, axis=axis,", "__sub__. \"\"\" lo = sanitize_units_add(self, left_object, \"subtraction\") return super(YTArray, self).__rsub__(lo)", "= self._ufunc_registry[ufunc] inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator", "YTArray) for a in arrs): return v if not all(isinstance(a,", "around np.vstack that preserves units. 
\"\"\" v = np.vstack(arrs) v", "= data.ds._get_field_info(field[0],field[1]) else: finfo = data.ds._get_field_info(field) if finfo.sampling_type == 'particle':", "unit2 def preserve_units(unit1, unit2=None): return unit1 @lru_cache(maxsize=128, typed=False) def power_unit(unit,", "for base, exponent in arr._units.items(): bs = convert_pint_units(base) p_units.append(\"%s**(%s)\" %", "v = validate_numpy_wrapper_units(v, arrs) return v def ustack(arrs, axis=0): \"\"\"Join", "of the quantity. Powers must be specified using python syntax", "self.units) def __array_finalize__(self, obj): if obj is None and hasattr(self,", "Parameters ---------- units : Unit object or string, optional The", "out_orig[0].flat[:] = out.flat[:] if isinstance(out_orig[0], YTArray): out_orig[0].units = unit return", "copy of this array with the data in the specified", "UnitRegistry object The registry to create units from. If input_units", "= super(YTArray, self).__getitem__(item) if ret.shape == (): return YTQuantity(ret, self.units,", "in all columns being read. comments : str, optional The", "nextafter, ldexp, fmod, divmod_, heaviside ) trigonometric_operators = ( sin,", "header += \" Units\\n \" + '\\t'.join(units) np.savetxt(fname, np.transpose(arrays), header=header,", "optional The units you want to get the bare quantity", "= f[group_name] else: g = f dataset = g[dataset_name] data", "the input validation logic adds significant overhead. If set, input_units", "in the input validation logic adds significant overhead. If set,", "unorm(data, ord=None, axis=None, keepdims=False): \"\"\"Matrix or vector norm that preserves", "if len(words) == 2 and words[1] == \"Units\": next_one =", "context is None: if ret.shape == (): return ret[()] else:", "# Nothing provided. Make dimensionless... units = Unit() elif isinstance(input_units,", "arrs) return v def uhstack(arrs): \"\"\"Stack arrays in sequence horizontally", "POWER_SIGN_MAPPING = {multiply: 1, divide: -1} # redefine this here", "units. 
\"\"\" return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit, equiv, **kwargs): \"\"\"", "of YTArrays or single YTArray The array(s) to write to", "units=None, equivalence=None, **kwargs): \"\"\" Creates a copy of this array", "in kwargs and kwargs['axis'] is not None: unit = u**(power_sign*inp.shape[kwargs['axis']])", "__rtruediv__(self, left_object): \"\"\" See __div__. \"\"\" lo = sanitize_units_mul(self, left_object)", "preserve_units, fmax: preserve_units, fmin: preserve_units, isreal: return_without_unit, iscomplex: return_without_unit, isfinite:", "or equal') return super(YTArray, self).__ge__(oth) def __gt__(self, other): \"\"\" Test", "__ge__(self, other): \"\"\" Test if this is greater than or", "k, v in [(k, v) for k, v in lut.items()", "numpy 1.10.0. \"\"\" if LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm = np.linalg.norm(data,", "and returns it. Returns ------- Quantity object with data converted", "# into the pickle file unit, lut = str(state[0]), default_unit_symbol_lut.copy()", "unit information to a text file. Parameters ---------- fname :", "units, registry=registry) # # Start convenience methods # @property def", "cm This is equivalent to: >>> b = YTQuantity(5, 'code_length',", "the object on the right of the `-` from this", "Defaults to False. 
Examples -------- >>> from yt import YTArray", "= finfo.output_units else: units = finfo.units if isinstance(x, YTArray): arr", "else: raise YTInvalidUnitEquivalence(equiv, self.units, unit) def list_equivalencies(self): \"\"\" Lists the", "# If I don't do this, super(YTArray, self).__pow__ returns a", "YTArray([1, 2, 3], 'cm') >>> b = YTArray([4, 5, 6],", "ret def sanitize_units_add(this_object, other_object, op_string): inp = coerce_iterable_units(this_object) ret =", "not power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit) # Work around a sympy", "\"\"\" oth = sanitize_units_mul(self, other) np.divide(self, oth, out=self) return self", "arrs): raise RuntimeError(\"Not all of your arrays are YTArrays.\") a1", "obj = np.asarray(input_array, dtype=dtype).view(cls) obj.units = input_units if registry is", "None: memodict = {} ret = super(YTArray, self).__deepcopy__(memodict) return type(self)(ret,", "np_ret = super(YTArray, self).__reduce__() obj_state = np_ret[2] unit_state = (((str(self.units),", "------- Quantity object with data converted to mks units. \"\"\"", "invert_units(unit): raise TypeError( \"Bit-twiddling operators are not defined for YTArray", "preserving units This is a wrapper around np.vstack that preserves", "use in the conversion. If one is not supplied, the", "greater than the object on the right. \"\"\" # Check", "__rmul__(self, left_object): \"\"\" See __mul__. \"\"\" lo = sanitize_units_mul(self, left_object)", "self).__floordiv__(ro) def __rfloordiv__(self, left_object): \"\"\" See __div__. \"\"\" lo =", "units def handle_comparison_units(inps, units, ufunc, ret_class, raise_error=False): if units[0] !=", "g/cm**3 and strip them when it would be annoying to", "= \"hr\" ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent))) ap_units = \"*\".join(ap_units) if", "self).__rmul__(lo) def __imul__(self, other): \"\"\" See __mul__. 
\"\"\" oth =", "units: %s\" % \", \".join(units)) return tuple([YTArray(arr, unit) for arr,", "is None: if ret.shape == (): return ret[()] else: return", "handle_preserve_units(inps, units, ufunc, ret_class): if units[0] != units[1]: any_nonzero =", "read the arrays from. If not specified, the arrays are", "dtype, out), self.units @return_arr def sum(self, axis=None, dtype=None, out=None): return", "units = arr1.units * arr2.units arr = YTArray(v, units, registry=registry)", "yt.units.unit_lookup_table import \\ default_unit_symbol_lut from yt.units.equivalencies import equivalence_registry from yt.utilities.logger", "an AstroPy \"Quantity\" to a YTArray or YTQuantity. Parameters ----------", "numpy as np >>> a = YTQuantity(12, 'g/cm**3') >>> np.abs(a)", "issubclass(cls1, (numeric_type, np.number, list, tuple)): return cls2 if cls2 in", "units obj.units = units return obj def __repr__(self): \"\"\" \"\"\"", "np.bool_(False): units = (units[0], units[0]) elif not any([u1d, u2d]): if", "array(s) to write to the file. fmt : str or", "class. \"\"\" from __future__ import print_function #----------------------------------------------------------------------------- # Copyright (c)", "object on the right of the `/` operator. \"\"\" ro", "isnat, ) binary_operators = ( add, subtract, multiply, divide, logaddexp,", "be annoying to deal with them. >>> np.log10(a) array([ -inf,", "to get a new quantity in. equivalence : string, optional", "self.units.registry.lut),) + obj_state[:],) new_ret = np_ret[:2] + unit_state + np_ret[3:]", "v) for k, v in lut.items() if len(v) == 2]:", "k, v in lut.items() if len(v) == 2]: lut[k] =", "from yt.extern.six.moves import cPickle as pickle if dataset_name is None:", "implementation This is necessary for stdlib deepcopy of arrays and", "to a dataset. 
\\n\" \"Perhaps you meant to do something", "(numeric_type, np.number, list, tuple)): return cls2 if cls2 in (np.ndarray,", "in trigonometric_operators: inp = inp.in_units('radian').v if out_arr is not None:", "None)) if unit2 is None and ufunc is not power:", "ord=ord, axis=axis, keepdims=keepdims) if norm.shape == (): return YTQuantity(norm, data.units)", "ret[()] else: return ret ufunc = context[0] inputs = context[1]", "unit system to be used in the conversion. If not", "NumPy array. Optionally, an equivalence can be specified to convert", "YTArray): return YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry) # Input array is", "out_arr, inp, u def get_inp_u_binary(ufunc, inputs): inp1 = coerce_iterable_units(inputs[0]) inp2", "[(k, v) for k, v in lut.items() if len(v) ==", "one will be used. NOTE: This is not the same", "a copy of this array with the unit information stripped", "to the equivalent base units in the specified unit system.", "the same unit as this array and a value of", "units.append(\"dimensionless\") if header != '': header += '\\n' header +=", "equivalent unit in *equiv*. \"\"\" return self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\"", "to an equivalent, e.g., something that is related by only", "case happens when we try to load an old pickle", "sanitize_units_mul(self, right_object) return super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self, left_object): \"\"\" See", "of supplementary info to write to append as attributes to", "in *equiv*. \"\"\" return self.units.has_equivalent(equiv) def ndarray_view(self): \"\"\" Returns a", "__rsub__(self, left_object): \"\"\" See __sub__. \"\"\" lo = sanitize_units_add(self, left_object,", "unit1/unit2 @lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit): return unit**-1 def passthrough_unit(unit, unit2=None):", "to write the arrays to. 
If not specified, the arrays", "@return_arr def mean(self, axis=None, dtype=None, out=None): return super(YTArray, self).mean(axis, dtype,", "\"\"\" Creates a copy of this array with the data", "return arr def uintersect1d(arr1, arr2, assume_unique=False): \"\"\"Find the sorted unique", "Unit metadata is encoded in the zeroth element of third", "object on the right. \"\"\" oth = validate_comparison_units(self, other, 'less_than", "+ (0.0, r'\\rm{' + k.replace('_', '\\ ') + '}') registry", "be numeric\") ret = YTArray.__new__(cls, input_scalar, input_units, registry, dtype=dtype, bypass_validation=bypass_validation)", "a numpy ndarray\"\"\" return np.array(self) v = value @property def", "a text file. Each row in the text file must", "under consideration, convert so we don't mix units with the", "use the `units` # attribute. if isinstance(ret, YTArray): if not", "= sanitize_units_add(self, right_object, \"addition\") return super(YTArray, self).__add__(ro) def __radd__(self, left_object):", "elif iterable(input_array) and input_array: if isinstance(input_array[0], YTArray): return YTArray(np.array(input_array, dtype=dtype),", "np.float64) if bypass_validation is True: obj = np.asarray(input_array, dtype=dtype).view(cls) obj.units", "without referring to a dataset. 
\\n\" \"Perhaps you meant to", "item): ret = super(YTArray, self).__getitem__(item) if ret.shape == (): return", "usecols is not None: units = [units[col] for col in", "Check units type if input_units is None: # Nothing provided.", "methods # if LooseVersion(np.__version__) < LooseVersion('1.13.0'): def __add__(self, right_object): \"\"\"", "will be written at the beginning of the file, before", "import equivalence_registry from yt.utilities.logger import ytLogger as mylog from .pint_conversions", "f if dataset_name in g.keys(): d = g[dataset_name] # Overwrite", "np.bitwise_or(self, other, out=self) return self def __xor__(self, right_object): return super(YTArray,", "logaddexp, logaddexp2, true_divide, power, remainder, mod, arctan2, hypot, bitwise_and, bitwise_or,", "= type(self)(self.ndview * conversion_factor, new_units) if offset: np.subtract(new_array, offset*new_array.uq, new_array)", "unit2 = 1.0 return (inp1, inp2), (unit1, unit2), ret_class def", "to a YTArray or YTQuantity. Parameters ---------- arr : Pint", "= op1.units*op2.units if dot.shape == (): return YTQuantity(dot, units) return", "2 and words[1] == \"Units\": next_one = True else: #", "of the arrays must be # dimensionless or filled with", "to the equivalency, which should be used if that particular", "_unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) self.units = new_units values", "data in the supplied units, and returns it without units.", "e.g., something that is related by only a constant factor", "arr2, assume_unique=assume_unique) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v def", "= { add: preserve_units, subtract: preserve_units, multiply: multiply_units, divide: divide_units,", "__div__. 
\"\"\" oth = sanitize_units_mul(self, other) np.floor_divide(self, oth, out=self) return", "(100,\"kpc\")) >>> a = sp[\"density\"] >>> b = sp[\"temperature\"] >>>", ">>> b = YTArray([4, 5, 6], 'm') >>> a +", "arrays are YTArrays.\") a1 = arrs[0] if not all(a.units ==", "out_arr = YTArray(np.asarray(out_arr), unit) else: out_arr = ret_class(np.asarray(out_arr), unit) if", "if len(inputs) == 1: _, inp, u = get_inp_u_unary(ufunc, inputs)", "print_function #----------------------------------------------------------------------------- # Copyright (c) 2013, yt Development Team. #", "self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\" A scalar associated", "= dataset.attrs.get('units', '') if 'unit_registry' in dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring())", "= getattr(input_array, 'dtype', np.float64) if bypass_validation is True: obj =", "catch the first line of numbers try: col_words = line.strip().split(delimiter)", "the data in the equivalent mks units, and returns it.", ": data-type, optional Data-type of the resulting array; default: float.", "left_object): \"\"\" See __mul__. \"\"\" lo = sanitize_units_mul(self, left_object) return", "v = np.intersect1d(arr1, arr2, assume_unique=assume_unique) v = validate_numpy_wrapper_units(v, [arr1, arr2])", "out_arr): if unit.is_dimensionless and unit.base_value != 1.0: if not units[0].is_dimensionless:", "oth = validate_comparison_units( self, other, 'greater than or equal') return", "# Start convenience methods # @property def value(self): \"\"\"Get a", "same units. See the documentation of numpy.intersect1d for full details.", "'units'): return self.units = getattr(obj, 'units', NULL_UNIT) def __pos__(self): \"\"\"", "\"ds.arr(%s, \\\"%s\\\")\" % (input_array, input_units) ) if isinstance(input_array, YTArray): ret", "of the resulting array; default: float. delimiter : str, optional", "set, but it's not a Unit object. 
# don't handle", "return self def __div__(self, right_object): \"\"\" Divide this YTArray by", "out = np.asarray(out_orig[0]) else: out = None if len(inputs) ==", "str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is None: dataset_name =", "== (): return YTQuantity(ret, units) else: # This could be", "YTArray and has the same dimensions as the object #", "import wraps from numpy import \\ add, subtract, multiply, divide,", "ufunc, ret_class, raise_error=True) unit = unit_operator(*units) if unit_operator in (multiply_units,", "return_without_unit, conj: passthrough_unit, exp: return_without_unit, exp2: return_without_unit, log: return_without_unit, log2:", "the right of the `+` operator. Must check for the", "absolute, rint, \\ sign, conj, exp, exp2, log, log2, log10,", "in the current units. equivalence : string, optional The equivalence", "units from. If input_units is already associated with a unit", "read in and convert a dataset in an hdf5 file", "= inputs[0] u = getattr(inp, 'units', None) if u is", "out_arr, unit = handle_multiply_divide_units( unit, units, out, out_arr) else: raise", "it. Optionally, an equivalence can be specified to convert to", "equivalent quantity which is not in the same dimensions. ..", "= [\"dimensionless\"]*num_cols arrays = np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter, converters=None, unpack=True,", "values. Parameters ---------- fname : str Filename to read. dtype", "ret raise YTUnitOperationError(op_string, inp.units, ret.units) ret = ret.in_units(inp.units) else: #", "around np.dot that preserves units. \"\"\" dot = np.dot(op1.d, op2.d)", "signbit, floor, ceil, trunc, modf, frexp, fabs, spacing, positive, isnat,", "\"\"\" Returns a view into the array, but as an", "YTQuantity. 
Explicitly # casting to YTArray avoids creating a YTQuantity", "any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False): units =", "import UnitRegistry if unit_registry is None: unit_registry = UnitRegistry() powers_dict", "power. Parameters ---------- power : float or dimensionless YTArray. The", "array is an already formed ndarray instance # We first", "or array to attach units to input_units : String unit", "YTArray([ 401., 502., 603.]) cm >>> b + a YTArray([", "with zero or # array filled with zero if not", "oth = validate_comparison_units(self, other, 'not equal') return super(YTArray, self).__ne__(oth) def", "sequence of arrays. This wrapper around numpy.concatenate preserves units. All", "**kwargs): \"\"\" Convert a YTArray or YTQuantity to an equivalent,", "d = g[dataset_name] # Overwrite without deleting if we can", "is not None: out_arr = ufunc(inp).view(np.ndarray) return out_arr, inp, u", "\"\"\" oth = sanitize_units_mul(self, other) np.floor_divide(self, oth, out=self) return self", "super(YTArray, self).__repr__()+' '+self.units.__repr__() def __str__(self): \"\"\" \"\"\" return str(self.view(np.ndarray)) +", "return v def array_like_field(data, x, field): field = data._determine_fields(field)[0] if", "registry = UnitRegistry(lut=lut, add_default_symbols=False) self.units = Unit(unit, registry=registry) def __deepcopy__(self,", "units. \"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\" Convert the array", "other): \"\"\" Test if this is greater than or equal", "self.units) uq = unit_quantity @property def unit_array(self): \"\"\"Get a YTArray", "list, or array to attach units to input_units : String", "them. Parameters ---------- units : Unit object or string The", "a = YTQuantity(1, 'cm') >>> b = YTQuantity(2, 'm') >>>", "a in arrs): return v if not all(isinstance(a, YTArray) for", "else: raise RuntimeError(\"Undefined operation for a YTArray subclass. 
\" \"Received", "units.append(str(array.units)) else: units.append(\"dimensionless\") if header != '': header += '\\n'", "units The units of the quantity. Powers must be specified", "0 being the first. For example, ``usecols = (1,4,5)`` will", "that is related by only a constant factor but not", "abs of the data. \"\"\" return super(YTArray, self).__abs__() # #", "the other object is a YTArray and has the same", "super(YTArray, self).__or__(right_object) def __ror__(self, left_object): return super(YTArray, self).__ror__(left_object) def __ior__(self,", "return_without_unit, expm1: return_without_unit, log1p: return_without_unit, sqrt: sqrt_unit, square: square_unit, reciprocal:", "Unit): ret.units = input_units else: ret.units = Unit(input_units, registry=registry) return", "def __iand__(self, other): np.bitwise_and(self, other, out=self) return self def __pow__(self,", "= YTArray(v, units, registry=registry) return arr def uintersect1d(arr1, arr2, assume_unique=False):", "symbol object, or astropy units The units of the quantity.", "4], 'cm') >>> uunion1d(A, B) YTArray([ 1., 2., 3., 4.])", "= self.units.expr.as_powers_dict() units = [] for unit, pow in powers_dict.items():", "'units', NULL_UNIT, ) if any([ff != getattr(_, 'units', NULL_UNIT) for", "units This is a wrapper around np.linalg.norm that preserves units.", "self.ndarray_view() d = ndview @property def unit_quantity(self): \"\"\"Get a YTQuantity", "LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm = np.linalg.norm(data, ord=ord, axis=axis) else: norm", "invalid units or array data, but can lead to significant", "dataset_name='dinosaurs', ... 
info=myinfo) \"\"\" from yt.utilities.on_demand_imports import _h5py as h5py", "YTArray(arr.value, ap_units, registry=unit_registry) else: return YTQuantity(arr.value, ap_units, registry=unit_registry) def to_astropy(self,", "def __rand__(self, left_object): return super(YTArray, self).__rand__(left_object) def __iand__(self, other): np.bitwise_and(self,", "if isinstance(input_array, YTArray): ret = input_array.view(cls) if input_units is None:", "'greater than') return super(YTArray, self).__gt__(oth) # # End comparison operators", "getattr(inp, 'units', None) if u is None: u = NULL_UNIT", "data=self) for k, v in info.items(): d.attrs[k] = v f.close()", "from_pint(cls, arr, unit_registry=None): \"\"\" Convert a Pint \"Quantity\" to a", "Unit object or str The units you want to convert", "we have to do this because AstroPy is silly and", ">>> B = yt.YTArray([2, 3, 4], 'cm') >>> uconcatenate((A, B))", "float(word) num_cols = len(col_words) break except ValueError: mylog.warning(\"Unrecognized character at", "unit) else: if ret_class is YTQuantity: # This happens if", "False units = [] num_cols = -1 for line in", "getattr(inp2, 'units', None) ret_class = get_binary_op_return_class(type(inp1), type(inp2)) if unit1 is", "} __array_priority__ = 2.0 def __new__(cls, input_array, input_units=None, registry=None, dtype=None,", "**kwargs) if unit_operator in (multiply_units, divide_units): out, out_arr, unit =", "import \\ angle, \\ current_mks, \\ dimensionless, \\ em_dimensions from", "= validate_comparison_units(self, other, 'not equal') return super(YTArray, self).__ne__(oth) def __ge__(self,", "self.units, unit) else: return new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv, self.units, unit)", "arctan2, hypot, bitwise_and, bitwise_or, bitwise_xor, left_shift, right_shift, greater, greater_equal, less,", "if isinstance(arr.value, np.ndarray): return YTArray(arr.value, ap_units, registry=unit_registry) else: return YTQuantity(arr.value,", "units = 
input_units else: # units kwarg set, but it's", "YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry) # Input array is an already", "data.ds._get_field_info(field[0],field[1]) else: finfo = data.ds._get_field_info(field) if finfo.sampling_type == 'particle': units", "---------- arr : Pint Quantity The Quantity to convert from.", "not None: obj.units.registry = registry return obj if input_array is", "units[0].is_dimensionless: if units[0].dimensions == units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out)", "v + (0.0, r'\\rm{' + k.replace('_', '\\ ') + '}')", "registry=None, dtype=np.float64, bypass_validation=False): if not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)): raise", "\\\"%s\\\")\" % (input_array, input_units) ) if isinstance(input_array, YTArray): ret =", "'registry', None)) if unit2 is None and ufunc is not", "= f.create_group(group_name) else: g = f if dataset_name in g.keys():", "\"\"\"Get a YTArray filled with ones with the same unit", "float(v) else: return v def in_base(self, unit_system=\"cgs\"): \"\"\" Creates a", "def coerce_iterable_units(input_object): if isinstance(input_object, np.ndarray): return input_object if iterable(input_object): if", "if not my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units, my_units.dimensions, other_units, other_units.dimensions) return", "handle_multiply_divide_units(unit, units, out, out_arr): if unit.is_dimensionless and unit.base_value != 1.0:", "but can lead to significant speedups in the input validation", "\" + \"an AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def", "out=self) return self def __truediv__(self, right_object): ro = sanitize_units_mul(self, right_object)", "return super(YTArray, self).__div__(ro) def __rdiv__(self, left_object): \"\"\" See __div__. 
\"\"\"", "= v f.close() @classmethod def from_hdf5(cls, filename, dataset_name=None, group_name=None): r\"\"\"Attempts", "from yt.utilities.logger import ytLogger as mylog from .pint_conversions import convert_pint_units", "@lru_cache(maxsize=128, typed=False) def divide_units(unit1, unit2): return unit1/unit2 @lru_cache(maxsize=128, typed=False) def", "that function for descriptions of the keyword arguments. The keepdims", "in input_object]): raise YTIterableUnitCoercionError(input_object) # This will create a copy", "read, with 0 being the first. For example, ``usecols =", "dtype : data-type The dtype of the array data. Defaults", "comments. Default: '# ', as expected by e.g. ``yt.loadtxt``. Examples", "comments : str, optional The character used to indicate the", "b = YTQuantity(2, 'm') >>> a + b 201.0 cm", "UnitRegistry to use in the conversion. If one is not", "# Copyright (c) 2013, yt Development Team. # # Distributed", "unit_lut = None f.close() registry = UnitRegistry(lut=unit_lut, add_default_symbols=False) return cls(data,", "if issubclass(cls2, YTQuantity): return cls1 if issubclass(cls1, cls2): return cls1", "self, other, 'greater than or equal') return super(YTArray, self).__ge__(oth) def", "raise YTUnitOperationError('power', power.unit) # Work around a sympy issue (I", "YTArray with the abs of the data. \"\"\" return super(YTArray,", "dtype of the array data. 
Examples -------- >>> from yt", "!= '': header += '\\n' header += \" Units\\n \"", "validate_numpy_wrapper_units(v, arrs): if not any(isinstance(a, YTArray) for a in arrs):", "unit, lut = state[0] except TypeError: # this case happens", "out, out_arr) else: raise RuntimeError( \"Support for the %s ufunc", "def handle_preserve_units(inps, units, ufunc, ret_class): if units[0] != units[1]: any_nonzero", "'equal') return super(YTArray, self).__eq__(oth) def __ne__(self, other): \"\"\" Test if", "def reciprocal_unit(unit): return unit**-1 def passthrough_unit(unit, unit2=None): return unit def", "ureg = UnitRegistry() >>> a = np.random.random(10) >>> b =", "or character separating columns. header : str, optional String that", "is compatible with this quantity. Returns Unit object. \"\"\" #", "quantity, try the :meth:`list_equivalencies` method. Default: None Returns ------- NumPy", "axisb=-1, axisc=-1, axis=None): \"\"\"Applies the cross product to two YT", "the data. \"\"\" return super(YTArray, self).__neg__() def __mul__(self, right_object): \"\"\"", "v in info.items(): d.attrs[k] = v f.close() @classmethod def from_hdf5(cls,", "@return_arr def sum(self, axis=None, dtype=None, out=None): return super(YTArray, self).sum(axis, dtype,", "is not equal to the object on the right. \"\"\"", "resulting array; default: float. delimiter : str, optional The string", "system to be used in the conversion. If not specified,", "def to(self, units, equivalence=None, **kwargs): \"\"\" An alias for YTArray.in_units().", "comparison_unit, less: comparison_unit, less_equal: comparison_unit, not_equal: comparison_unit, equal: comparison_unit, logical_and:", "is not a YTArray, then one of the arrays must", "return False return True def return_arr(func): @wraps(func) def wrapped(*args, **kwargs):", "\"\"\" See __add__. \"\"\" lo = sanitize_units_add(self, left_object, \"addition\") return", "def __neg__(self): \"\"\" Negate the data. 
\"\"\" return super(YTArray, self).__neg__()", "oth, out=self) return self def __or__(self, right_object): return super(YTArray, self).__or__(right_object)", "# Check that other is a YTArray. if hasattr(other, 'units'):", "= str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit, Rational(pow))) units = \"*\".join(units) return", "defined for YTArray instances\") def bitop_units(unit1, unit2): raise TypeError( \"Bit-twiddling", "if unit2 is None and ufunc is not power: unit2", "3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm This is equivalent to: >>> b", "Check that the other is a YTArray. if other is", "= Unit(registry=getattr(unit1, 'registry', None)) elif ufunc is power: unit2 =", "for all numpy versions, see # numpy issue #9081 return", "the sympy expression 1/1 rather than # a dimensionless Unit", "return str(self.view(np.ndarray)) + ' ' + str(self.units) # # Start", "context[0] inputs = context[1] if ufunc in unary_operators: out_arr, inp,", "def wrapped(*args, **kwargs): ret, units = func(*args, **kwargs) if ret.shape", "= YTQuantity(12, 'g/cm**3') >>> np.abs(a) 12 g/cm**3 and strip them", "group to write the arrays to. If not specified, the", ".. note:: All additional keyword arguments are passed to the", "units kwarg set, but it's not a Unit object. #", "any([isinstance(o, YTArray) for o in input_object]): ff = getattr(input_object[0], 'units',", "axis parameter specifies the index of the new axis in", "if any([isinstance(o, YTArray) for o in input_object]): ff = getattr(input_object[0],", "system's base units. 
Parameters ---------- unit_system : string, optional The", "units = handle_comparison_units( inps, units, ufunc, ret_class) elif unit_operator is", "not any(isinstance(a, YTArray) for a in arrs): return v if", "YTQuantity(dot, units) return YTArray(dot, units) def uvstack(arrs): \"\"\"Stack arrays in", "the object # under consideration, convert so we don't mix", "= pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut = None f.close() registry = UnitRegistry(lut=unit_lut,", "in [(k, v) for k, v in lut.items() if len(v)", "cm^3). registry : A UnitRegistry object The registry to create", "logaddexp: return_without_unit, logaddexp2: return_without_unit, true_divide: divide_units, floor_divide: divide_units, negative: passthrough_unit,", "multiplied. \"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__mul__(ro) def", "= E.in_base(unit_system=\"galactic\") \"\"\" return self.in_units(self.units.get_base_equivalent(unit_system)) def in_cgs(self): \"\"\" Creates a", "Explicitly # casting to YTArray avoids creating a YTQuantity with", "array and units to the equivalent cgs units. \"\"\" return", "def in_cgs(self): \"\"\" Creates a copy of this array with", "sanitize_units_mul(self, other) np.floor_divide(self, oth, out=self) return self def __or__(self, right_object):", "returned tuple, itself a tuple used to restore the state", "is any whitespace. usecols : sequence, optional Which columns to", "= data._determine_fields(field)[0] if isinstance(field, tuple): finfo = data.ds._get_field_info(field[0],field[1]) else: finfo", "= validate_numpy_wrapper_units(v, [arr1, arr2]) return v def uunion1d(arr1, arr2): \"\"\"Find", "results in all columns being read. comments : str, optional", "supplied, the default one will be used. 
Examples -------- >>>", "Rational from yt.units.unit_lookup_table import \\ default_unit_symbol_lut from yt.units.equivalencies import equivalence_registry", "passthrough_unit, isnat: return_without_unit, heaviside: preserve_units, } __array_priority__ = 2.0 def", "in unary_operators: out_arr, inp, u = get_inp_u_unary(ufunc, inputs, out_arr) unit", "the `+` operator. Must check for the correct (same dimension)", "See __mul__. \"\"\" oth = sanitize_units_mul(self, other) np.multiply(self, oth, out=self)", ">>> a.in_cgs() 1.543e+25 cm This is equivalent to: >>> b", "your arrays are YTArrays.\") a1 = arrs[0] if not all(a.units", "3.]) cm \"\"\" v = np.intersect1d(arr1, arr2, assume_unique=assume_unique) v =", "offset: np.subtract(new_array, offset*new_array.uq, new_array) return new_array else: return self.to_equivalent(units, equivalence,", "lookup table # into the pickle file unit, lut =", "= \"CGS\" raise YTEquivalentDimsError(my_units, other_units, base) if not my_units.same_dimensions_as(other_units): raise", "def validate_comparison_units(this, other, op_string): # Check that other is a", "return self def __and__(self, right_object): return super(YTArray, self).__and__(right_object) def __rand__(self,", "g = f.create_group(group_name) else: g = f if dataset_name in", "elif isinstance(input_array, np.ndarray): pass elif iterable(input_array) and input_array: if isinstance(input_array[0],", "is None: return True oth = validate_comparison_units(self, other, 'not equal')", "floating point scalar The scalar to attach units to input_units", "mod, absolute, rint, \\ sign, conj, exp, exp2, log, log2,", "from yt.units.dimensions import \\ angle, \\ current_mks, \\ dimensionless, \\", "AstroPy installed, so you can't convert to \" + \"an", "power_unit(unit, power): return unit**power @lru_cache(maxsize=128, typed=False) def square_unit(unit): return unit*unit", "axis in the dimensions of the result. For example, if", "would be annoying to deal with them. 
>>> print(np.log10(a)) 1.07918124605", "note:: All additional keyword arguments are passed to the equivalency,", "arccosh, arctanh, deg2rad, rad2deg, invert, logical_not, isreal, iscomplex, isfinite, isinf,", "with data converted to mks units. \"\"\" return self.in_units(self.units.get_mks_equivalent()) def", "info.items(): d.attrs[k] = v f.close() @classmethod def from_hdf5(cls, filename, dataset_name=None,", "g = f[group_name] else: g = f dataset = g[dataset_name]", "str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit, Rational(pow))) units = \"*\".join(units) return unit_registry.Quantity(self.value,", ">>> uunion1d(A, B) YTArray([ 1., 2., 3., 4.]) cm \"\"\"", "of the hdf5 file. dataset_name: string The name of the", "u = NULL_UNIT if u.dimensions is angle and ufunc in", "other. \"\"\" # Check that the other is a YTArray.", "try the :meth:`list_equivalencies` method. Default: None Returns ------- NumPy array", "ret_class, raise_error=True) unit = unit_operator(*units) if unit_operator in (multiply_units, divide_units):", "(same dimension) units. \"\"\" ro = sanitize_units_add(self, right_object, \"subtraction\") return", "__neg__(self): \"\"\" Negate the data. \"\"\" return super(YTArray, self).__neg__() def", "def handle_comparison_units(inps, units, ufunc, ret_class, raise_error=False): if units[0] != units[1]:", "possible equivalencies associated with this YTArray or YTQuantity. 
\"\"\" self.units.list_equivalencies()", "tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, \\ bitwise_and, bitwise_or, bitwise_xor,", "handle_multiply_divide_units( unit, units, out, out_arr) else: raise RuntimeError( \"Support for", "@classmethod def from_pint(cls, arr, unit_registry=None): \"\"\" Convert a Pint \"Quantity\"", "of this array with the data in the equivalent cgs", "v = np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis) units =", "oth, out=self) return self def __floordiv__(self, right_object): ro = sanitize_units_mul(self,", "*units) else: if raise_error: raise YTUfuncUnitError(ufunc, *units) inps = (inps[0],", "type obj = np.asarray(input_array, dtype=dtype).view(cls) # Check units type if", "data in the equivalent mks units, and returns it. Returns", "ret = YTArray.__new__(cls, input_scalar, input_units, registry, dtype=dtype, bypass_validation=bypass_validation) if ret.size", "ff = getattr(input_object[0], 'units', NULL_UNIT, ) if any([ff != getattr(_,", "arr = copy.deepcopy(x) arr.convert_to_units(units) return arr if isinstance(x, np.ndarray): return", "raise YTInvalidUnitEquivalence(equiv, self.units, unit) else: return new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv,", "do something like this instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\" % (input_array,", "from distutils.version import LooseVersion from functools import wraps from numpy", "Quantity object with data converted to cgs units. \"\"\" return", "return super(YTArray, self).__rand__(left_object) def __iand__(self, other): np.bitwise_and(self, other, out=self) return", "if we can get away with it. 
if d.shape ==", "= a1.units return v def uconcatenate(arrs, axis=0): \"\"\"Concatenate a sequence", "and input_array: if isinstance(input_array[0], YTArray): return YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry)", "return self.convert_to_units(self.units.get_base_equivalent(unit_system)) def convert_to_cgs(self): \"\"\" Convert the array and units", "\"\"\"Applies the cross product to two YT arrays. This wrapper", "units) return YTArray(dot, units) def uvstack(arrs): \"\"\"Stack arrays in sequence", "`/` operator. \"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__div__(ro)", "@lru_cache(maxsize=128, typed=False) def sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128, typed=False) def multiply_units(unit1,", "YTQuantity): return float(v) else: return v def in_base(self, unit_system=\"cgs\"): \"\"\"", "bitwise_xor: bitop_units, invert: invert_units, left_shift: bitop_units, right_shift: bitop_units, greater: comparison_unit,", "dtype=dtype).view(cls) obj.units = input_units if registry is not None: obj.units.registry", "object on the right of the `-` from this ytarray.", "units attribute, attempt to infer units as well. group_name: string", "'units', None) ret_class = get_binary_op_return_class(type(inp1), type(inp2)) if unit1 is None:", "to YTArray avoids creating a YTQuantity with # size >", "and method == 'reduce': power_sign = POWER_SIGN_MAPPING[ufunc] if 'axis' in", "If set, input_units *must* be a valid unit object. Defaults", "this because AstroPy is silly and defines # hour as", "out), self.units @return_arr def sum(self, axis=None, dtype=None, out=None): return super(YTArray,", "return super(YTArray, self).__radd__(lo) def __iadd__(self, other): \"\"\" See __add__. 
\"\"\"", "ustack(arrs, axis=0): \"\"\"Join a sequence of arrays along a new", "Converting from AstroPy Quantity u = arr.unit ap_units = []", "b + a YTArray([ 4.01, 5.02, 6.03]) m NumPy ufuncs", "self.units, bypass_validation=True) else: if hasattr(self, 'units'): ret.units = self.units return", "len(col_words) break except ValueError: mylog.warning(\"Unrecognized character at beginning of line:", "def unorm(data, ord=None, axis=None, keepdims=False): \"\"\"Matrix or vector norm that", "Convert the array and units to the equivalent base units", "return_without_unit, isinf: return_without_unit, isnan: return_without_unit, signbit: return_without_unit, copysign: passthrough_unit, nextafter:", "= registry return obj if input_array is NotImplemented: return input_array.view(cls)", "ufunc in binary_operators: unit_operator = self._ufunc_registry[context[0]] inps, units, ret_class =", "np.all(a == b) True \"\"\" def __new__(cls, input_scalar, input_units=None, registry=None,", "the other is a YTArray. if other is None: return", "The Quantity to convert from. unit_registry : yt UnitRegistry, optional", "arccos: return_without_unit, arctan: return_without_unit, arctan2: arctan2_unit, arcsinh: return_without_unit, arccosh: return_without_unit,", "character separating columns. header : str, optional String that will", "= NULL_UNIT if u.dimensions is angle and ufunc in trigonometric_operators:", "\"\"\" Test if this is equal to the object on", "string, optional The equivalence you wish to use. 
To see", "YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence, YTEquivalentDimsError from yt.utilities.lru_cache", "try: return type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units, unit)", "for o in input_object]): ff = getattr(input_object[0], 'units', NULL_UNIT, )", "**kwargs).value if isinstance(self, YTQuantity): return float(v) else: return v def", "or sequence of strs, optional A single format (%10.5f), or", "a.to_pint() \"\"\" from pint import UnitRegistry if unit_registry is None:", "it without units. Output is therefore a bare NumPy array.", "unit_registry=None): \"\"\" Convert an AstroPy \"Quantity\" to a YTArray or", "return_without_unit, tan: return_without_unit, sinh: return_without_unit, cosh: return_without_unit, tanh: return_without_unit, arcsin:", "if registry is not None: obj.units.registry = registry return obj", "np.ma.masked_array) or issubclass(cls1, (numeric_type, np.number, list, tuple)): return cls2 if", "this YTArray by the object on the right of the", "equivalence=None, **kwargs): \"\"\" An alias for YTArray.in_units(). See the docstrings", "v in lut.items() if len(v) == 2]: lut[k] = v", "elif unit_operator is preserve_units: inps, units = handle_preserve_units( inps, units,", "u2d]): if not units[0].same_dimensions_as(units[1]): raise YTUnitOperationError(ufunc, *units) else: if raise_error:", ") trigonometric_operators = ( sin, cos, tan, ) class YTArray(np.ndarray):", "Unit(input_units, registry=registry) return ret elif isinstance(input_array, np.ndarray): pass elif iterable(input_array)", "`units` # attribute. if isinstance(ret, YTArray): if not inp.units.same_dimensions_as(ret.units): #", "data converted to mks units. \"\"\" return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self,", "with unit information to a text file. 
Parameters ---------- fname", "\\ em_dimensions from yt.utilities.exceptions import \\ YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError,", "len(inputs))) if unit is None: out_arr = np.array(out_arr, copy=False) elif", "cos, tan, ) class YTArray(np.ndarray): \"\"\" An ndarray subclass that", "of that function for details. \"\"\" return self.in_units(units, equivalence=equivalence, **kwargs)", "fabs: passthrough_unit, rint: return_without_unit, sign: return_without_unit, conj: passthrough_unit, exp: return_without_unit,", "= Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit): return self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]()", "is a YTArray, so it can't be None. return False", "\"\"\" import h5py from yt.extern.six.moves import cPickle as pickle if", "registry=registry) # # Start convenience methods # @property def value(self):", "by only a constant factor but not in the same", "self).__getitem__(item) if ret.shape == (): return YTQuantity(ret, self.units, bypass_validation=True) else:", "and isinstance(input_units, (str, bytes)): if input_units.startswith('code_'): raise UnitParseError( \"Code units", "yt.utilities.exceptions import \\ YTUnitOperationError, YTUnitConversionError, \\ YTUfuncUnitError, YTIterableUnitCoercionError, \\ YTInvalidUnitEquivalence,", "loadtxt(fname, dtype='float', delimiter='\\t', usecols=None, comments='#'): r\"\"\" Load YTArrays with unit", "are datasets at the top level by default. 
\"\"\" import", "cls1 if issubclass(cls2, cls1): return cls2 else: raise RuntimeError(\"Undefined operation", "YTArray or YTQuantity to an equivalent, e.g., something that is", "data, or, if none is found, uses np.float64 bypass_validation :", "def __ge__(self, other): \"\"\" Test if this is greater than", "power.units.is_dimensionless: raise YTUnitOperationError('power', power.unit) # Work around a sympy issue", "inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # Make sure the", "we don't mix units with the same # dimensions. if", "dimensionless Unit object. if self.units.is_dimensionless and power == -1: ret", "arrays and quantities. \"\"\" if memodict is None: memodict =", "YTQuantity(2, 'm') >>> a + b 201.0 cm >>> b", "A wrapper around numpy.intersect1d that preserves units. All input arrays", "new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) self.units =", "__div__. \"\"\" oth = sanitize_units_mul(self, other) np.divide(self, oth, out=self) return", "np.array(out_arr, copy=False) elif ufunc in (modf, divmod_): out_arr = tuple((ret_class(o,", "top level by default. \"\"\" import h5py from yt.extern.six.moves import", "if not units[0].is_dimensionless: if units[0].dimensions == units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray),", "recognize # \"yr\" as \"year\" if str(unit).endswith(\"yr\") and len(str(unit)) in", "is a YTArray before we use the `units` # attribute.", "__div__(self, right_object): \"\"\" Divide this YTArray by the object on", "unit**0.5 @lru_cache(maxsize=128, typed=False) def multiply_units(unit1, unit2): return unit1 * unit2", "registry=unit_registry) else: return YTQuantity(arr.value, ap_units, registry=unit_registry) def to_astropy(self, **kwargs): \"\"\"", "Here we catch the first line of numbers try: col_words", "return ret def validate_comparison_units(this, other, op_string): # Check that other", "units. 
\"\"\" v = np.hstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return", "the zeroth element of third element of the returned tuple,", "spacing: passthrough_unit, positive: passthrough_unit, divmod_: passthrough_unit, isnat: return_without_unit, heaviside: preserve_units,", "axis=None, dtype=None, out=None): return super(YTArray, self).mean(axis, dtype, out), self.units @return_arr", "object on the right. \"\"\" # Check that other is", "self def __or__(self, right_object): return super(YTArray, self).__or__(right_object) def __ror__(self, left_object):", "words[1:] if len(words) == 2 and words[1] == \"Units\": next_one", "E = YTQuantity(2.5, \"erg/s\") >>> E_new = E.in_base(unit_system=\"galactic\") \"\"\" return", "= inp.in_units('radian').v if out_arr is not None: out_arr = ufunc(inp).view(np.ndarray)", "unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut = None f.close() registry =", "used. NOTE: This is not the same as a yt", "this here to avoid a circular import from yt.funcs def", "in the conversion. If not specified, the default base units", "file. dataset_name: string The name of the dataset to read", "other): \"\"\" Test if this is greater than the object", "comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps, units, ufunc, ret_class,", "out=out) unit = Unit(registry=unit.registry) return out, out_arr, unit def coerce_iterable_units(input_object):", "for the %s ufunc with %i inputs has not been\"", "%i inputs has not been\" \"added to YTArray.\" % (str(ufunc),", "file. comments : str, optional String that will be prepended", "we can get away with it. if d.shape == self.shape", "is therefore a bare NumPy array. 
Optionally, an equivalence can", "Test if this is greater than the object on the", "raise YTUnitOperationError(op_string, inp.units, ret.units) ret = ret.in_units(inp.units) else: # If", "registry=None, dtype=None, bypass_validation=False): if dtype is None: dtype = getattr(input_array,", "distutils.version import LooseVersion from functools import wraps from numpy import", "ldexp, fmod, divmod_, heaviside ) trigonometric_operators = ( sin, cos,", "dictionary A dictionary of supplementary info to write to append", "here to avoid a circular import from yt.funcs def iterable(obj):", "u = getattr(inp, 'units', None) if u is None: u", "# hour as \"h\" if unit_str == \"h\": unit_str =", "import \\ add, subtract, multiply, divide, logaddexp, logaddexp2, true_divide, \\", "True def return_arr(func): @wraps(func) def wrapped(*args, **kwargs): ret, units =", "array data. Defaults to the dtype of the input data,", "+ a 2.01 m NumPy ufuncs will pass through units", "logical_or, logical_xor, maximum, minimum, fmax, fmin, copysign, nextafter, ldexp, fmod,", "new axis while preserving units The axis parameter specifies the", "YTQuantity(norm, data.units) return YTArray(norm, data.units) def udot(op1, op2): \"\"\"Matrix or", "a1.units for a in arrs[1:]): raise RuntimeError(\"Your arrays must have", "return super(YTArray, self).__and__(right_object) def __rand__(self, left_object): return super(YTArray, self).__rand__(left_object) def", "= 1.0 return (inp1, inp2), (unit1, unit2), ret_class def handle_preserve_units(inps,", "power_unit, remainder: preserve_units, mod: preserve_units, fmod: preserve_units, absolute: passthrough_unit, fabs:", "except ImportError: positive, divmod_, isnat, heaviside = (None,)*4 from yt.units.unit_object", "then one of the arrays must be # dimensionless or", "it would be annoying to deal with them. 
>>> print(np.log10(a))", "must be scalars\") return ret def __repr__(self): return str(self) def", "isfinite: return_without_unit, isinf: return_without_unit, isnan: return_without_unit, signbit: return_without_unit, copysign: passthrough_unit,", "registry to use in the conversion. If one is not", "and convert a dataset in an hdf5 file into a", "__idiv__(self, other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self, other)", ">>> from pint import UnitRegistry >>> import numpy as np", "% (unit_str, Rational(exponent))) ap_units = \"*\".join(ap_units) if isinstance(arr.value, np.ndarray): return", "self).__mul__(ro) def __rmul__(self, left_object): \"\"\" See __mul__. \"\"\" lo =", "size > 1 return YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr, copy=False), unit)", ">>> a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit = Unit(unit, registry=self.units.registry) if self.units.same_dimensions_as(conv_unit):", "cgs units. \"\"\" return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\" Creates a", "UnitRegistry object. Examples -------- >>> a = YTQuantity(4.0, \"cm**2/s\") >>>", "be annoying to deal with them. >>> print(np.log10(a)) 1.07918124605 YTQuantity", "v = value @property def ndview(self): \"\"\"Get a view of", "{} info['units'] = str(self.units) info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is", "method == 'reduce': power_sign = POWER_SIGN_MAPPING[ufunc] if 'axis' in kwargs", "log2, log10, expm1, log1p, sqrt, square, reciprocal, sin, cos, tan,", "= getattr(inp1, 'units', None) unit2 = getattr(inp2, 'units', None) ret_class", "information from a text file. Each row in the text", "object or string, optional The units you want to get", "the array, but as an ndarray rather than ytarray. Returns", "__add__. 
\"\"\" lo = sanitize_units_add(self, left_object, \"addition\") return super(YTArray, self).__radd__(lo)", "arcsinh, arccosh, arctanh, deg2rad, rad2deg, \\ bitwise_and, bitwise_or, bitwise_xor, invert,", "format (%10.5f), or a sequence of formats. delimiter : str,", "array and units to the equivalent base units in the", "__iand__(self, other): np.bitwise_and(self, other, out=self) return self def __pow__(self, power):", "the array data. Defaults to the dtype of the input", "return super(YTArray, self).__or__(right_object) def __ror__(self, left_object): return super(YTArray, self).__ror__(left_object) def", "'m') >>> a + b YTArray([ 401., 502., 603.]) cm", "around numpy.concatenate preserves units. All input arrays must have the", "typed=False) def _unit_repr_check_same(my_units, other_units): \"\"\" Takes a Unit object, or", "For example, ``usecols = (1,4,5)`` will extract the 2nd, 5th", "def to_ndarray(self): \"\"\" Creates a copy of this array with", "unit_system=\"cgs\"): \"\"\" Creates a copy of this array with the", "casting to YTArray avoids creating a YTQuantity with # size", "yt.units.equivalencies import equivalence_registry from yt.utilities.logger import ytLogger as mylog from", "__add__(self, right_object): \"\"\" Add this ytarray to the object on", "\"Bit-twiddling operators are not defined for YTArray instances\") def get_inp_u_unary(ufunc,", "= units[1].is_dimensionless any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0] == np.bool_(False):", "numeric\") ret = YTArray.__new__(cls, input_scalar, input_units, registry, dtype=dtype, bypass_validation=bypass_validation) if", "not equal to the object on the right. 
\"\"\" #", "add: preserve_units, subtract: preserve_units, multiply: multiply_units, divide: divide_units, logaddexp: return_without_unit,", "all(isinstance(a, YTArray) for a in arrs): raise RuntimeError(\"Not all of", "if input_array is NotImplemented: return input_array.view(cls) if registry is None", "arctan2_unit, arcsinh: return_without_unit, arccosh: return_without_unit, arctanh: return_without_unit, hypot: preserve_units, deg2rad:", "break except ValueError: mylog.warning(\"Unrecognized character at beginning of line: \\\"%s\\\".\"", "try to load an old pickle file # created before", "def __gt__(self, other): \"\"\" Test if this is greater than", "a Unit object. # don't handle all the cases here,", "of the file. comments : str, optional String that will", "return super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self, left_object): \"\"\" See __div__. \"\"\"", "YTArray or YTQuantity to a Pint Quantity. Parameters ---------- arr", "in the file. info: dictionary A dictionary of supplementary info", "@return_arr def std(self, axis=None, dtype=None, out=None, ddof=0): return super(YTArray, self).std(axis,", "1, 0, 1, 2, 3]) g/cm**3 and strip them when", "ceil, trunc, fabs, spacing try: # numpy 1.13 or newer", "uunion1d(arr1, arr2): \"\"\"Find the union of two arrays. A wrapper", "return tuple([YTArray(arr, unit) for arr, unit in zip(arrays, units)]) def", "the `*` operator. The unit objects handle being multiplied. \"\"\"", "ddof=0): return super(YTArray, self).std(axis, dtype, out, ddof), self.units def __array_wrap__(self,", "yt import YTQuantity >>> a = YTQuantity(1, 'cm') >>> b", "return_arr(func): @wraps(func) def wrapped(*args, **kwargs): ret, units = func(*args, **kwargs)", "input_units else: # units kwarg set, but it's not a", "== self.dtype: d[...] 
= self for k in d.attrs.keys(): del", "'less_than') return super(YTArray, self).__lt__(oth) def __le__(self, other): \"\"\"Test if this", "unit_operator in (multiply_units, divide_units): out, out_arr, unit = handle_multiply_divide_units( unit,", "'not equal') return super(YTArray, self).__ne__(oth) def __ge__(self, other): \"\"\" Test", "dataset_name: string The name of the dataset to read from.", "ndarray rather than ytarray. Returns ------- View of this array's", "angle, \\ current_mks, \\ dimensionless, \\ em_dimensions from yt.utilities.exceptions import", "invert, logical_not, isreal, iscomplex, isfinite, isinf, isnan, signbit, floor, ceil,", "\"\"\" Test if this is not equal to the object", "that will be written at the end of the file.", "ret.units = Unit(input_units, registry=registry) return ret elif isinstance(input_array, np.ndarray): pass", "self.units*b.units def __reduce__(self): \"\"\"Pickle reduction method See the documentation for", "or string of known unit symbol, and check that it", "python syntax (cm**3, not cm^3). registry : ~yt.units.unit_registry.UnitRegistry The registry", "provided. Make dimensionless... 
units = Unit() elif isinstance(input_units, Unit): if", "have AstroPy installed, so you can't convert to \" +", "return cls1 if issubclass(cls2, cls1): return cls2 else: raise RuntimeError(\"Undefined", "\"addition\") return super(YTArray, self).__radd__(lo) def __iadd__(self, other): \"\"\" See __add__.", "= np.union1d(arr1, arr2) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return v", ">>> c = yt.YTArray.from_pint(b) \"\"\" p_units = [] for base,", "% (str(ufunc), len(inputs))) if unit is None: out_arr = np.array(out_arr,", "= coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # Make sure the other", "yt.units.unit_registry import UnitRegistry from yt.units.dimensions import \\ angle, \\ current_mks,", "left_shift: bitop_units, right_shift: bitop_units, greater: comparison_unit, greater_equal: comparison_unit, less: comparison_unit,", "np_ret[:2] + unit_state + np_ret[3:] return new_ret def __setstate__(self, state):", "else: raise YTUnitOperationError(ufunc, unit1, unit2) unit2 = 1.0 return (inp1,", "= data.ds._get_field_info(field) if finfo.sampling_type == 'particle': units = finfo.output_units else:", "unit_registry : yt UnitRegistry, optional A yt unit registry to", "inps, units = handle_preserve_units( inps, units, ufunc, ret_class) unit =", "return type(self)(super(YTArray, self).__pos__(), self.units) @return_arr def dot(self, b, out=None): return", "the :meth:`list_equivalencies` method. 
Examples -------- >>> a = yt.YTArray(1.0e7,\"K\") >>>", "This is necessary for stdlib deepcopy of arrays and quantities.", ">>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.arr(np.ones(5), 'code_length') >>>", "= sp[\"density\"] >>> b = sp[\"temperature\"] >>> c = sp[\"velocity_x\"]", "typed=False) def divide_units(unit1, unit2): return unit1/unit2 @lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit):", "YTArray(input_object) return input_object else: return input_object def sanitize_units_mul(this_object, other_object): inp", "filled with zero if not np.any(other_object): return ret.view(np.ndarray) elif not", "f = h5py.File(filename) if group_name is not None: if group_name", "is not None: units = [units[col] for col in usecols]", "*= conversion_factor if offset: np.subtract(self, offset*self.uq, self) return self def", "used. \"\"\" # Converting from AstroPy Quantity u = arr.unit", "ret_class is YTQuantity: # This happens if you do ndarray", "\\ hypot, sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg,", "ytarray. Must check for the correct (same dimension) units. \"\"\"", "right_object): \"\"\" Subtract the object on the right of the", "the supplied units, and returns it without units. Output is", "Pint Quantity The Quantity to convert from. unit_registry : yt", "to dataset_name: string The name of the dataset to create", "is not input_units.registry: units = Unit(str(input_units), registry=registry) else: units =", "raise ImportError(\"You don't have AstroPy installed, so you can't convert", "issue #9081 return type(self)(super(YTArray, self).__pos__(), self.units) @return_arr def dot(self, b,", "an hdf5 file into a YTArray. Parameters ---------- filename: string", "in input_object]): ff = getattr(input_object[0], 'units', NULL_UNIT, ) if any([ff", "u = get_inp_u_unary(ufunc, inputs, out_arr) unit = self._ufunc_registry[context[0]](u) ret_class =", "str The file to write the YTArrays to. 
arrays :", "of numpy.cross for full details. \"\"\" v = np.cross(arr1, arr2,", "-1} # redefine this here to avoid a circular import", "return obj def __repr__(self): \"\"\" \"\"\" return super(YTArray, self).__repr__()+' '+self.units.__repr__()", "has_equivalent(self, equiv): \"\"\" Check to see if this YTArray or", "arctan2_unit(unit1, unit2): return NULL_UNIT def comparison_unit(unit1, unit2=None): return None def", "ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_comparison_units(inps, units, ufunc, ret_class,", "Convert an AstroPy \"Quantity\" to a YTArray or YTQuantity. Parameters", "return np.array(self) @classmethod def from_astropy(cls, arr, unit_registry=None): \"\"\" Convert an", "= type(self) elif ufunc in binary_operators: unit_operator = self._ufunc_registry[context[0]] inps,", "if hasattr(other, 'units'): if this.units.expr is other.units.expr: if this.units.base_value ==", "\"\"\" oth = sanitize_units_mul(self, other) np.true_divide(self, oth, out=self) return self", "yt.YTArray([2, 3, 4], 'cm') >>> uconcatenate((A, B)) YTArray([ 1., 2.,", "= UnitRegistry() >>> a = np.random.random(10) >>> b = ureg.Quantity(a,", "__array_ufunc__(self, ufunc, method, *inputs, **kwargs): func = getattr(ufunc, method) if", "ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>> a = ds.quan(5, 'code_length') >>> a.in_cgs()", "See __div__. 
\"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rfloordiv__(lo)", "\\ angle, \\ current_mks, \\ dimensionless, \\ em_dimensions from yt.utilities.exceptions", "less_equal, not_equal, equal, logical_and, \\ logical_or, logical_xor, logical_not, maximum, minimum,", "other): np.bitwise_and(self, other, out=self) return self def __pow__(self, power): \"\"\"", "(unit1, unit2), ret_class def handle_preserve_units(inps, units, ufunc, ret_class): if units[0]", "is YTQuantity: # This happens if you do ndarray *", "\".join(units)) return tuple([YTArray(arr, unit) for arr, unit in zip(arrays, units)])", "self.value else: v = self.in_units(units, equivalence=equivalence, **kwargs).value if isinstance(self, YTQuantity):", "the first line of numbers try: col_words = line.strip().split(delimiter) for", "unit_operator = self._ufunc_registry[ufunc] inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if", "keepdims=keepdims) if norm.shape == (): return YTQuantity(norm, data.units) return YTArray(norm,", "**kwargs): \"\"\" Creates a copy of this array with the", "return ret_class(np.array(out_arr, copy=False), unit) else: # numpy version equal to", "without deleting if we can get away with it. if", "return super(YTArray, self).__ne__(oth) def __ge__(self, other): \"\"\" Test if this", "v if not all(isinstance(a, YTArray) for a in arrs): raise", "array and a value of 1.0\"\"\" return YTQuantity(1.0, self.units) uq", ":obj:`!iterable` A tuple, list, or array to attach units to", "same units. Parameters ---------- unit : string The unit that", "getattr(input_object[0], 'units', NULL_UNIT, ) if any([ff != getattr(_, 'units', NULL_UNIT)", "one will be used. 
Examples -------- >>> from pint import", "ret.shape == (): return ret[()] else: return ret ufunc =", "instances\") def get_inp_u_unary(ufunc, inputs, out_arr=None): inp = inputs[0] u =", "numpy 1.13 or newer from numpy import positive, divmod as", "any_nonzero[1] == np.bool_(False): units = (units[0], units[0]) elif not any([u1d,", "mylog.warning(\"Unrecognized character at beginning of line: \\\"%s\\\".\" % line[0]) f.close()", "other) np.divide(self, oth, out=self) return self def __truediv__(self, right_object): ro", "__ror__(self, left_object): return super(YTArray, self).__ror__(left_object) def __ior__(self, other): np.bitwise_or(self, other,", "units = Unit(input_units, registry=registry) # Attach the units obj.units =", "__div__. \"\"\" oth = sanitize_units_mul(self, other) np.true_divide(self, oth, out=self) return", "= unit_quantity @property def unit_array(self): \"\"\"Get a YTArray filled with", "Negate the data. \"\"\" return super(YTArray, self).__neg__() def __mul__(self, right_object):", "sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, \\ bitwise_and,", "\"\"\" self.units.list_equivalencies() def has_equivalent(self, equiv): \"\"\" Check to see if", "will be used. \"\"\" # Converting from AstroPy Quantity u", "np.bool_(False): units = (units[0], units[0]) else: if not units[0].same_dimensions_as(units[1]): raise", "optional The character used to indicate the start of a", "self._ufunc_registry[context[0]] inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in", "YTArray(arr.magnitude, p_units, registry=unit_registry) else: return YTQuantity(arr.magnitude, p_units, registry=unit_registry) def to_pint(self,", "other, op_string): # Check that other is a YTArray. if", "than the object on the right. \"\"\" # Check that", "b = a.to_pint() \"\"\" from pint import UnitRegistry if unit_registry", "a.in_cgs() 1.543e+25 cm This is equivalent to: >>> b =", "Convert a YTArray or YTQuantity to a Pint Quantity. 
Parameters", "return super(YTArray, self).__ror__(left_object) def __ior__(self, other): np.bitwise_or(self, other, out=self) return", "product that preserves units This is a wrapper around np.dot", "out=self) return self def __sub__(self, right_object): \"\"\" Subtract the object", "def __itruediv__(self, other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self,", "powers_dict = self.units.expr.as_powers_dict() units = [] for unit, pow in", "deal with them. >>> np.log10(a) array([ -inf, 0. , 0.30103", "stdlib deepcopy of arrays and quantities. \"\"\" if memodict is", "!= units[1]: u1d = units[0].is_dimensionless u2d = units[1].is_dimensionless any_nonzero =", "new axis in the dimensions of the result. For example,", "( sin, cos, tan, ) class YTArray(np.ndarray): \"\"\" An ndarray", "reduction operators # @return_arr def prod(self, axis=None, dtype=None, out=None): if", "unit = self._ufunc_registry[context[0]](u) ret_class = type(self) elif ufunc in binary_operators:", "this is any whitespace. usecols : sequence, optional Which columns", "# # End comparison operators # # # Begin reduction", "issubclass(cls2, cls1): return cls2 else: raise RuntimeError(\"Undefined operation for a", "v = np.union1d(arr1, arr2) v = validate_numpy_wrapper_units(v, [arr1, arr2]) return", "units : Unit object or str The units you want", "np.ndarray): return YTArray(arr.value, ap_units, registry=unit_registry) else: return YTQuantity(arr.value, ap_units, registry=unit_registry)", "ufunc in (multiply, divide) and method == 'reduce': power_sign =", "= (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units)) return inps, units def handle_multiply_divide_units(unit, units,", "old pickle file # created before we serialized the unit", "np.ones_like(self) ua = unit_array def __getitem__(self, item): ret = super(YTArray,", "to be used in the conversion. 
If not specified, the", "u.dimensions is angle and ufunc in trigonometric_operators: inp = inp.in_units('radian').v", "str(self.units) # # Start unit conversion methods # def convert_to_units(self,", "Examples -------- >>> sp = ds.sphere(\"c\", (100,\"kpc\")) >>> a =", "return self.in_units(conv_unit) this_equiv = equivalence_registry[equiv]() oneway_or_equivalent = ( conv_unit.has_equivalent(equiv) or", "details. Examples -------- >>> A = yt.YTArray([1, 2, 3], 'cm')", "return v def unorm(data, ord=None, axis=None, keepdims=False): \"\"\"Matrix or vector", "np.dot that preserves units. \"\"\" dot = np.dot(op1.d, op2.d) units", "License. # # The full license is in the file", "YTQuantity(arr.value, ap_units, registry=unit_registry) def to_astropy(self, **kwargs): \"\"\" Creates a new", "import LooseVersion from functools import wraps from numpy import \\", "with it. if d.shape == self.shape and d.dtype == self.dtype:", "our class type obj = np.asarray(input_array, dtype=dtype).view(cls) # Check units", "as np >>> a = YTQuantity(12, 'g/cm**3') >>> np.abs(a) 12", "units. See the documentation for that function for descriptions of", "\"\"\" _ufunc_registry = { add: preserve_units, subtract: preserve_units, multiply: multiply_units,", "Unit(other_units, registry=my_units.registry) equiv_dims = em_dimensions.get(my_units.dimensions, None) if equiv_dims == other_units.dimensions:", "has the same dimensions as the object # under consideration,", "to convert to. \"\"\" new_units = _unit_repr_check_same(self.units, units) (conversion_factor, offset)", "return cls2 if issubclass(cls2, YTQuantity): return cls1 if issubclass(cls1, cls2):", "in the supplied units, and returns it. Optionally, an equivalence", "metadata extracted in __reduce__ and then serialized by pickle. \"\"\"", "object, or astropy units The units of the quantity. 
Powers", "' ' + str(self.units) # # Start unit conversion methods", "2.01 m NumPy ufuncs will pass through units where appropriate.", "uintersect1d(A, B) YTArray([ 2., 3.]) cm \"\"\" v = np.intersect1d(arr1,", "3, 4], 'cm') >>> uunion1d(A, B) YTArray([ 1., 2., 3.,", "\"Bit-twiddling operators are not defined for YTArray instances\") def bitop_units(unit1,", "out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs) if unit_operator in (multiply_units,", "same unit and shape as this array\"\"\" return np.ones_like(self) ua", "+ np_ret[3:] return new_ret def __setstate__(self, state): \"\"\"Pickle setstate method", "Unit() POWER_SIGN_MAPPING = {multiply: 1, divide: -1} # redefine this", "is None: dtype = getattr(input_array, 'dtype', np.float64) if bypass_validation is", "in zip(arrays, units)]) def savetxt(fname, arrays, fmt='%.18e', delimiter='\\t', header='', footer='',", "changed if len(lut['m']) == 2: lut.update(default_unit_symbol_lut) for k, v in", "Convert a YTArray or YTQuantity to an equivalent, e.g., something", "'\\ ') + '}') registry = UnitRegistry(lut=lut, add_default_symbols=False) self.units =", "scalars\") return ret def __repr__(self): return str(self) def validate_numpy_wrapper_units(v, arrs):", "\"\"\" ro = sanitize_units_add(self, right_object, \"addition\") return super(YTArray, self).__add__(ro) def", "sanitize_units_mul(self, left_object) return super(YTArray, self).__rdiv__(lo) def __idiv__(self, other): \"\"\" See", "angle and ufunc in trigonometric_operators: inp = inp.in_units('radian').v if out_arr", ": :obj:`!iterable` A tuple, list, or array to attach units", "This is equivalent to: >>> b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry)", "ret.size > 1: raise RuntimeError(\"YTQuantity instances must be scalars\") return", "\"\"\" oth = sanitize_units_mul(self, other) np.multiply(self, oth, out=self) return self", ">>> a + b 201.0 cm >>> b + a", "to mks units. 
\"\"\" return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit, equiv,", "if cls1 is cls2: return cls1 if cls1 in (np.ndarray,", "UnitRegistry if unit_registry is None: unit_registry = UnitRegistry() powers_dict =", "product to two YT arrays. This wrapper around numpy.cross preserves", "if 'axis' in kwargs and kwargs['axis'] is not None: unit", "wrapper around numpy.intersect1d that preserves units. All input arrays must", "around numpy.cross preserves units. See the documentation of numpy.cross for", "oneway_or_equivalent = ( conv_unit.has_equivalent(equiv) or this_equiv._one_way) if self.has_equivalent(equiv) and oneway_or_equivalent:", "is not the same as a yt UnitRegistry object. Examples", "== comments: if next_one: units = words[1:] if len(words) ==", "copy of the data in the iterable. return YTArray(input_object) return", "dtype=dtype).view(cls) # Check units type if input_units is None: #", "# Check units type if input_units is None: # Nothing", "units = handle_comparison_units( inps, units, ufunc, ret_class, raise_error=True) unit =", "if input_units is None: if registry is None: ret.units =", "inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # If the other", "and a value of 1.0\"\"\" return YTQuantity(1.0, self.units) uq =", "have the same units. See the documentation of numpy.concatenate for", "supplied units, and returns it without units. Output is therefore", "arrays. This wrapper around numpy.cross preserves units. See the documentation", "str, optional String that will be prepended to the ``header``", "same dimensions as the object # under consideration, convert so", "by default. \"\"\" import h5py from yt.extern.six.moves import cPickle as", "= YTArray([4, 5, 6], 'm') >>> a + b YTArray([", "\"Code units used without referring to a dataset. 
\\n\" \"Perhaps", "documentation for that function for descriptions of the keyword arguments.", "[a,b,c], header='My sphere stuff', delimiter=\"\\t\") \"\"\" if not isinstance(arrays, list):", "= np.array(out_arr, copy=False) elif ufunc in (modf, divmod_): out_arr =", "unit = unit_operator(*units) if unit_operator in (multiply_units, divide_units): out_arr, out_arr,", "np.hstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def ustack(arrs, axis=0):", "data in the specified unit system, and returns it in", "YTArray class. \"\"\" from __future__ import print_function #----------------------------------------------------------------------------- # Copyright", "return data.ds.arr(x, units) else: return data.ds.quan(x, units) def get_binary_op_return_class(cls1, cls2):", "def in_units(self, units, equivalence=None, **kwargs): \"\"\" Creates a copy of", ">>> import numpy as np >>> a = YTQuantity(12, 'g/cm**3')", "offset*new_array.uq, new_array) return new_array else: return self.to_equivalent(units, equivalence, **kwargs) def", "to input_units : String unit specification, unit symbol object, or", "the dimensions of the result. For example, if ``axis=0`` it", "units The axis parameter specifies the index of the new", "older than numpy 1.10.0. \"\"\" if LooseVersion(np.__version__) < LooseVersion('1.10.0'): norm", "Make dimensionless... units = Unit() elif isinstance(input_units, Unit): if registry", "__array_finalize__(self, obj): if obj is None and hasattr(self, 'units'): return", "1: out_arr = YTQuantity(np.asarray(out_arr), unit) else: if ret_class is YTQuantity:", "input_units is None: # Nothing provided. Make dimensionless... units =", "None: g = f[group_name] else: g = f dataset =", "the two input arrays. A wrapper around numpy.intersect1d that preserves", "the terms of the Modified BSD License. 
# # The", "return_without_unit, rad2deg: return_without_unit, bitwise_and: bitop_units, bitwise_or: bitop_units, bitwise_xor: bitop_units, invert:", "def __array_finalize__(self, obj): if obj is None and hasattr(self, 'units'):", "arr = YTArray(v, units, registry=registry) return arr def uintersect1d(arr1, arr2,", "don't mix units with the same # dimensions. if isinstance(ret,", "units return obj def __repr__(self): \"\"\" \"\"\" return super(YTArray, self).__repr__()+'", "shape as this array\"\"\" return np.ones_like(self) ua = unit_array def", "\\ sign, conj, exp, exp2, log, log2, log10, expm1, log1p,", "details. \"\"\" v = np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis)", "units) else: return data.ds.quan(x, units) def get_binary_op_return_class(cls1, cls2): if cls1", "unit2): return unit1/unit2 @lru_cache(maxsize=128, typed=False) def reciprocal_unit(unit): return unit**-1 def", "heaviside: preserve_units, } __array_priority__ = 2.0 def __new__(cls, input_array, input_units=None,", "line.strip().split(delimiter) for word in col_words: float(word) num_cols = len(col_words) break", "the array data as a numpy ndarray\"\"\" return np.array(self) v", "ceil, trunc, modf, frexp, fabs, spacing, positive, isnat, ) binary_operators", "installed is older than numpy 1.10.0. \"\"\" if LooseVersion(np.__version__) <", "the lut if the pickle was saved prior to PR", "super(YTArray, self).__le__(oth) def __eq__(self, other): \"\"\" Test if this is", "# we have to do this because AstroPy is silly", "super(YTArray, self).__ne__(oth) def __ge__(self, other): \"\"\" Test if this is", "preserves units. See the documentation for that function for descriptions", "an equivalent quantity which is not in the same dimensions.", ">>> myinfo = {'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... 
info=myinfo)", "to: >>> b = YTQuantity(5, 'code_length', registry=ds.unit_registry) >>> np.all(a ==", "pickle file # created before we serialized the unit symbol", "+ '}') registry = UnitRegistry(lut=lut, add_default_symbols=False) self.units = Unit(unit, registry=registry)", "coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # If the other object is", "\"\"\" return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\" Creates a copy of", ": ~yt.units.unit_registry.UnitRegistry The registry to create units from. If input_units", "If the dataset has a units attribute, attempt to infer", "the cross product to two YT arrays. This wrapper around", "import copy import numpy as np from distutils.version import LooseVersion", "None: if registry is None: ret.units = input_array.units else: units", "if isinstance(field, tuple): finfo = data.ds._get_field_info(field[0],field[1]) else: finfo = data.ds._get_field_info(field)", "unit) def list_equivalencies(self): \"\"\" Lists the possible equivalencies associated with", "arr.convert_to_units(units) return arr if isinstance(x, np.ndarray): return data.ds.arr(x, units) else:", "None and ufunc is not power: unit2 = Unit(registry=getattr(unit1, 'registry',", "# created before we serialized the unit symbol lookup table", "used without referring to a dataset. \\n\" \"Perhaps you meant", "tightly integrated with yt datasets: >>> import yt >>> ds", "---------- fname : str The file to write the YTArrays", "formats. delimiter : str, optional String or character separating columns.", "YTArray before we use the `units` # attribute. 
if isinstance(ret,", ": sequence, optional Which columns to read, with 0 being", "\"cm**2/s\") >>> b = a.to_pint() \"\"\" from pint import UnitRegistry", "(units[0], units[0]) elif not any([u1d, u2d]): if not units[0].same_dimensions_as(units[1]): raise", "input_object if iterable(input_object): if any([isinstance(o, YTArray) for o in input_object]):", "the pickle format changed if len(lut['m']) == 2: lut.update(default_unit_symbol_lut) for", "# Distributed under the terms of the Modified BSD License.", "return unit1 @lru_cache(maxsize=128, typed=False) def power_unit(unit, power): return unit**power @lru_cache(maxsize=128,", "return unit def return_without_unit(unit, unit2=None): return None def arctan2_unit(unit1, unit2):", "units) else: # This could be a subclass, so don't", "conversion. If one is not supplied, the default one will", "bitop_units(unit1, unit2): raise TypeError( \"Bit-twiddling operators are not defined for", "NULL_UNIT) for _ in input_object]): raise YTIterableUnitCoercionError(input_object) # This will", "dataset_name: string The name of the dataset to create in", "= get_inp_u_binary(ufunc, inputs) if unit_operator in (preserve_units, comparison_unit, arctan2_unit): inps,", "if the version of numpy installed is older than numpy", "yt.extern.six.moves import cPickle as pickle if dataset_name is None: dataset_name", "self).__pow__(power) return type(self)(ret, input_units='') return super(YTArray, self).__pow__(power) def __abs__(self): \"\"\"", "arrs) return v def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1, axisc=-1,", "version equal to or newer than 1.13 def __array_ufunc__(self, ufunc,", "\"\"\" return self.in_units(self.units.get_mks_equivalent()) def to_equivalent(self, unit, equiv, **kwargs): \"\"\" Convert", "from yt.funcs def iterable(obj): try: len(obj) except: return False return", "rint: return_without_unit, sign: return_without_unit, conj: passthrough_unit, exp: return_without_unit, exp2: return_without_unit,", "YTArray): if 
not inp.units.same_dimensions_as(ret.units): # handle special case of adding", "if hasattr(self, 'units'): ret.units = self.units return ret # #", "else: unit = self._ufunc_registry[ufunc](u) ret_class = type(self) elif len(inputs) ==", "Quantity object with data converted to mks units. \"\"\" return", "out=self) return self def __pow__(self, power): \"\"\" Raise this YTArray", "YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>> np.all(a == b) True \"\"\" _ufunc_registry", "units, ufunc, ret_class): if units[0] != units[1]: any_nonzero = [np.any(inps[0]),", "False. Examples -------- >>> from yt import YTArray >>> a", "np.abs(a) YTArray([4, 3, 2, 1, 0, 1, 2, 3]) g/cm**3", "g.keys(): d = g[dataset_name] # Overwrite without deleting if we", "reduction method See the documentation for the standard library pickle", "has not been\" \"added to YTArray.\" % (str(ufunc), len(inputs))) if", "p_units.append(\"%s**(%s)\" % (bs, Rational(exponent))) p_units = \"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray):", "than the object on the right. \"\"\" # converts if", "== (): return YTQuantity(dot, units) return YTArray(dot, units) def uvstack(arrs):", "= coerce_iterable_units(inputs[1]) unit1 = getattr(inp1, 'units', None) unit2 = getattr(inp2,", "ret_class) unit = unit_operator(*units) out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs)", "used. Examples -------- >>> from pint import UnitRegistry >>> import", "preserves units. \"\"\" v = np.hstack(arrs) v = validate_numpy_wrapper_units(v, arrs)", "out=self) return self def __neg__(self): \"\"\" Negate the data. 
\"\"\"", "inp.units, dimensionless) return ret def validate_comparison_units(this, other, op_string): # Check", "d.attrs.keys(): del d.attrs[k] else: del f[dataset_name] d = g.create_dataset(dataset_name, data=self)", "preserve_units, mod: preserve_units, fmod: preserve_units, absolute: passthrough_unit, fabs: passthrough_unit, rint:", "ret.shape != (): ret = ret.view(YTArray) if context is None:", "comments: if next_one: units = words[1:] if len(words) == 2", "to the object on the right. \"\"\" # Check that", "isnan: return_without_unit, signbit: return_without_unit, copysign: passthrough_unit, nextafter: preserve_units, modf: passthrough_unit,", "\"addition\") np.add(self, oth, out=self) return self def __sub__(self, right_object): \"\"\"", "bitwise_and, bitwise_or, bitwise_xor, invert, left_shift, right_shift, \\ greater, greater_equal, less,", "YTArray(dot, units) def uvstack(arrs): \"\"\"Stack arrays in sequence vertically (row", "= func(np.asarray(inp), out=out, **kwargs) if ufunc in (multiply, divide) and", "def convert_to_mks(self): \"\"\" Convert the array and units to the", "= g.create_dataset(dataset_name, data=self) for k, v in info.items(): d.attrs[k] =", "dimensionless... units = Unit() elif isinstance(input_units, Unit): if registry and", "p_units = [] for base, exponent in arr._units.items(): bs =", "positive, divmod_, isnat, heaviside = (None,)*4 from yt.units.unit_object import Unit,", "2013, yt Development Team. # # Distributed under the terms", "be a subclass, so don't call YTArray directly. return type(args[0])(ret,", "is skipped. Using this option may produce corrupted, invalid units", "dataset_name = 'array_data' f = h5py.File(filename) if group_name is not", "the result. For example, if ``axis=0`` it will be the", "unit_quantity(self): \"\"\"Get a YTQuantity with the same unit as this", "info=None, group_name=None): r\"\"\"Writes a YTArray to hdf5 file. Parameters ----------", "arr2): \"\"\"Find the union of two arrays. 
A wrapper around", "array filled with zero if not np.any(other_object): return ret.view(np.ndarray) elif", "v = np.hstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def", "line.strip().split() if len(words) == 0: continue if line[0] == comments:", "in dataset.attrs.keys(): unit_lut = pickle.loads(dataset.attrs['unit_registry'].tostring()) else: unit_lut = None f.close()", "self).__ror__(left_object) def __ior__(self, other): np.bitwise_or(self, other, out=self) return self def", "'code_length', registry=ds.unit_registry) >>> np.all(a == b) True \"\"\" def __new__(cls,", "a unit. Parameters ---------- input_scalar : an integer or floating", "= h5py.File(filename) if group_name is not None: if group_name in", "return out_arr, inp, u def get_inp_u_binary(ufunc, inputs): inp1 = coerce_iterable_units(inputs[0])", "+ a YTArray([ 4.01, 5.02, 6.03]) m NumPy ufuncs will", "from yt.extern.six.moves import cPickle as pickle if info is None:", "obj.units = units return obj def __repr__(self): \"\"\" \"\"\" return", "'cm') >>> uconcatenate((A, B)) YTArray([ 1., 2., 3., 2., 3.,", "# # Start comparison operators. # def __lt__(self, other): \"\"\"", "Using this option may produce corrupted, invalid units or array", "element of third element of the returned tuple, itself a", "divide: divide_units, logaddexp: return_without_unit, logaddexp2: return_without_unit, true_divide: divide_units, floor_divide: divide_units,", "-------- >>> E = YTQuantity(2.5, \"erg/s\") >>> E_new = E.in_base(unit_system=\"galactic\")", "dataset has a units attribute, attempt to infer units as", "(column wise) while preserving units This is a wrapper around", "write to the file. 
fmt : str or sequence of", "% \", \".join(units)) return tuple([YTArray(arr, unit) for arr, unit in", "comparison_unit, not_equal: comparison_unit, equal: comparison_unit, logical_and: comparison_unit, logical_or: comparison_unit, logical_xor:", "numpy issue #9081 return type(self)(super(YTArray, self).__pos__(), self.units) @return_arr def dot(self,", "np.asarray(inps[1]), out=out, **kwargs) if unit_operator in (multiply_units, divide_units): out, out_arr,", "0: continue if line[0] == comments: if next_one: units =", ">>> b = a.to_pint() \"\"\" from pint import UnitRegistry if", "= ds.sphere(\"c\", (100,\"kpc\")) >>> a = sp[\"density\"] >>> b =", "+ \"an AstroPy quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def from_pint(cls,", "'code_length') >>> a.in_cgs() YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm", "and oneway_or_equivalent: new_arr = this_equiv.convert( self, conv_unit.dimensions, **kwargs) if isinstance(new_arr,", "(bs, Rational(exponent))) p_units = \"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude,", "bitop_units, frexp: return_without_unit, floor: passthrough_unit, ceil: passthrough_unit, trunc: passthrough_unit, spacing:", "of formats. delimiter : str, optional String or character separating", "YTQuantity(YTArray): \"\"\" A scalar associated with a unit. Parameters ----------", "with this YTArray or YTQuantity. \"\"\" self.units.list_equivalencies() def has_equivalent(self, equiv):", "used. Examples -------- >>> E = YTQuantity(2.5, \"erg/s\") >>> E.convert_to_base(unit_system=\"galactic\")", "6], 'm') >>> a + b YTArray([ 401., 502., 603.])", "The Pint UnitRegistry to use in the conversion. 
If one", "= ret_class(np.asarray(out_arr), unit) if out is not None: out_orig[0].flat[:] =", ">>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere stuff', delimiter=\"\\t\") \"\"\" if not", "if isinstance(x, np.ndarray): return data.ds.arr(x, units) else: return data.ds.quan(x, units)", "Team. # # Distributed under the terms of the Modified", "python syntax (cm**3, not cm^3). registry : A UnitRegistry object", "def uconcatenate(arrs, axis=0): \"\"\"Concatenate a sequence of arrays. This wrapper", "the same units. See the documentation of numpy.intersect1d for full", "the last dimension. This is a wrapper around np.stack that", "it's not already a Unit obj. if not isinstance(other_units, Unit):", "the same # dimensions. if isinstance(ret, YTArray): if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units)", "like this instead: \\n\" \"ds.arr(%s, \\\"%s\\\")\" % (input_array, input_units) )", ">>> a = YTArray(np.arange(8) - 4, 'g/cm**3') >>> np.abs(a) YTArray([4,", "array data, but can lead to significant speedups in the", "beginning of the file, before the unit header. footer :", "def __ifloordiv__(self, other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self,", "\"\"\" YTArray class. \"\"\" from __future__ import print_function #----------------------------------------------------------------------------- #", "return ret raise YTUnitOperationError(op_string, inp.units, ret.units) ret = ret.in_units(inp.units) else:", "to_pint(self, unit_registry=None): \"\"\" Convert a YTArray or YTQuantity to a", "np.hstack that preserves units. \"\"\" v = np.hstack(arrs) v =", "return type(self)(new_arr[0], new_arr[1]).in_units(unit) except YTUnitConversionError: raise YTInvalidUnitEquivalence(equiv, self.units, unit) else:", "as np >>> ureg = UnitRegistry() >>> a = np.random.random(10)", "only a constant factor but not in the same units.", "information. 
\"\"\" if _astropy.units is None: raise ImportError(\"You don't have", "None: if group_name in f: g = f[group_name] else: g", "unit_quantity @property def unit_array(self): \"\"\"Get a YTArray filled with ones", "a dataset in an hdf5 file into a YTArray. Parameters", "optional group to write the arrays to. If not specified,", "`+` operator. Must check for the correct (same dimension) units.", "def from_astropy(cls, arr, unit_registry=None): \"\"\" Convert an AstroPy \"Quantity\" to", "unit2): return unit1 * unit2 def preserve_units(unit1, unit2=None): return unit1", "= sanitize_units_mul(self, left_object) return super(YTArray, self).__rmul__(lo) def __imul__(self, other): \"\"\"", "arrays along a new axis while preserving units The axis", "*must* be a valid unit object. Defaults to False. Examples", "POWER_SIGN_MAPPING[ufunc] if 'axis' in kwargs and kwargs['axis'] is not None:", "axisc=-1, axis=None): \"\"\"Applies the cross product to two YT arrays.", "adds significant overhead. If set, input_units *must* be a valid", "cls1 if cls1 in (np.ndarray, np.matrix, np.ma.masked_array) or issubclass(cls1, (numeric_type,", "unit return out_arr def copy(self, order='C'): return type(self)(np.copy(np.asarray(self)), self.units) def", "\"\"\" # Check that other is a YTArray. if other", "YTArrays with unit information from a text file. Each row", "default base units of cgs are used. 
Examples -------- >>>", "units[0].is_dimensionless u2d = units[1].is_dimensionless any_nonzero = [np.any(inps[0]), np.any(inps[1])] if any_nonzero[0]", "return YTQuantity(norm, data.units) return YTArray(norm, data.units) def udot(op1, op2): \"\"\"Matrix", "array with the data in the equivalent mks units, and", "from AstroPy Quantity u = arr.unit ap_units = [] for", "uhstack(arrs): \"\"\"Stack arrays in sequence horizontally (column wise) while preserving", "a YTArray([ 4.01, 5.02, 6.03]) m NumPy ufuncs will pass", "array in arrays: if hasattr(array, \"units\"): units.append(str(array.units)) else: units.append(\"dimensionless\") if", "cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, invert, logical_not, isreal,", "dimension and if ``axis=-1`` it will be the last dimension.", ": Pint Quantity The Quantity to convert from. unit_registry :", "YTUnitOperationError(op_string, inp.units, ret.units) ret = ret.in_units(inp.units) else: # If the", "UnitRegistry, optional The Pint UnitRegistry to use in the conversion.", "None: v = self.value else: v = self.in_units(units, equivalence=equivalence, **kwargs).value", "string The name of the dataset to read from. If", "fmax: preserve_units, fmin: preserve_units, isreal: return_without_unit, iscomplex: return_without_unit, isfinite: return_without_unit,", "right of the `*` operator. The unit objects handle being", "not all(a.units == a1.units for a in arrs[1:]): raise RuntimeError(\"Your", "None: out_arr = np.array(out_arr, copy=False) elif ufunc in (modf, divmod_):", "return None def invert_units(unit): raise TypeError( \"Bit-twiddling operators are not", "\"*\".join(units) return unit_registry.Quantity(self.value, units) # # End unit conversion methods", "ret = super(YTArray, self).__array_wrap__(out_arr, context) if isinstance(ret, YTQuantity) and ret.shape", "~yt.units.unit_registry.UnitRegistry The registry to create units from. 
If input_units is", "YTQuantity(5, 'code_length', registry=ds.unit_registry) >>> np.all(a == b) True \"\"\" def", "\"\"\"copy.deepcopy implementation This is necessary for stdlib deepcopy of arrays", "------- Quantity object with data converted to cgs units. \"\"\"", "specification, unit symbol object, or astropy units The units of", "1.0: if not units[0].is_dimensionless: if units[0].dimensions == units[1].dimensions: out_arr =", "def __eq__(self, other): \"\"\" Test if this is equal to", "unit, equiv, **kwargs): \"\"\" Convert a YTArray or YTQuantity to", "registry is not input_units.registry: units = Unit(str(input_units), registry=registry) else: units", "any_nonzero[1] == np.bool_(False): units = (units[0], units[0]) else: if not", "the documentation for that function for descriptions of the keyword", "validate_numpy_wrapper_units(v, [arr1, arr2]) return v def uunion1d(arr1, arr2): \"\"\"Find the", "registry : ~yt.units.unit_registry.UnitRegistry The registry to create units from. If", "= em_dimensions.get(my_units.dimensions, None) if equiv_dims == other_units.dimensions: if current_mks in", "if none is found, uses np.float64 bypass_validation : boolean If", "right_object): \"\"\" Divide this YTArray by the object on the", "None: ret.units = input_array.units else: units = Unit(str(input_array.units), registry=registry) ret.units", "indicate the start of a comment; default: '#'. Examples --------", "import Unit, UnitParseError from yt.units.unit_registry import UnitRegistry from yt.units.dimensions import", "= sanitize_units_add(self, left_object, \"subtraction\") return super(YTArray, self).__rsub__(lo) def __isub__(self, other):", "a sequence of arrays. This wrapper around numpy.concatenate preserves units.", "return other if not this.units.same_dimensions_as(other.units): raise YTUnitOperationError(op_string, this.units, other.units) return", "quantity. Returns Unit object. 
\"\"\" # let Unit() handle units", "as np >>> a = YTArray(np.arange(8) - 4, 'g/cm**3') >>>", "for details. \"\"\" return self.in_units(units, equivalence=equivalence, **kwargs) def to_value(self, units=None,", "\"\"\" p_units = [] for base, exponent in arr._units.items(): bs", "associated with the unit object. dtype : data-type The dtype", "formed ndarray instance # We first cast to be our", "lut = state[0] except TypeError: # this case happens when", "the right of the `*` operator. The unit objects handle", "= coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1]) unit1 = getattr(inp1, 'units', None)", "of the `/` operator. \"\"\" ro = sanitize_units_mul(self, right_object) return", "greater than or equal to other. \"\"\" # Check that", "= handle_multiply_divide_units( unit, units, out_arr, out_arr) else: raise RuntimeError( \"Support", "np.bool_(False): units = (units[1], units[1]) elif any_nonzero[1] == np.bool_(False): units", "if len(lut['m']) == 2: lut.update(default_unit_symbol_lut) for k, v in [(k,", "np.matrix, np.ma.masked_array) or issubclass(cls2, (numeric_type, np.number, list, tuple)): return cls1", "left_object): return super(YTArray, self).__ror__(left_object) def __ior__(self, other): np.bitwise_or(self, other, out=self)", "See __div__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rdiv__(lo)", "op_string): # Check that other is a YTArray. if hasattr(other,", "same as a yt UnitRegistry object. Examples -------- >>> a", "\"hr\" ap_units.append(\"%s**(%s)\" % (unit_str, Rational(exponent))) ap_units = \"*\".join(ap_units) if isinstance(arr.value,", "def __idiv__(self, other): \"\"\" See __div__. \"\"\" oth = sanitize_units_mul(self,", "d.attrs[k] = v f.close() @classmethod def from_hdf5(cls, filename, dataset_name=None, group_name=None):", "factor but not in the same units. Parameters ---------- unit", "def __rdiv__(self, left_object): \"\"\" See __div__. 
\"\"\" lo = sanitize_units_mul(self,", "is NotImplemented: return input_array.view(cls) if registry is None and isinstance(input_units,", "---------- input_scalar : an integer or floating point scalar The", "specified unit system. Parameters ---------- unit_system : string, optional The", "returns it without units. Output is therefore a bare NumPy", "b, out=None): return super(YTArray, self).dot(b), self.units*b.units def __reduce__(self): \"\"\"Pickle reduction", "comments=comments, delimiter=delimiter, converters=None, unpack=True, usecols=usecols, ndmin=0) if usecols is not", "numeric_type from yt.utilities.on_demand_imports import _astropy from sympy import Rational from", "if iterable(input_object): if any([isinstance(o, YTArray) for o in input_object]): ff", "= \"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude, p_units, registry=unit_registry) else:", "ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__div__(ro) def __rdiv__(self, left_object):", "units = [units[col] for col in usecols] mylog.info(\"Array units: %s\"", "any whitespace. usecols : sequence, optional Which columns to read,", "power: power_unit, remainder: preserve_units, mod: preserve_units, fmod: preserve_units, absolute: passthrough_unit,", "full details. Examples -------- >>> A = yt.YTArray([1, 2, 3],", "a wrapper around np.stack that preserves units. \"\"\" v =", "self.d values *= conversion_factor if offset: np.subtract(self, offset*self.uq, self) return", "subtract, multiply, divide, logaddexp, logaddexp2, true_divide, power, remainder, mod, arctan2,", "units. \"\"\" dot = np.dot(op1.d, op2.d) units = op1.units*op2.units if", "isinstance(input_array[0], YTArray): return YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry) # Input array", "other, \"addition\") np.add(self, oth, out=self) return self def __sub__(self, right_object):", "a YTArray, so it can't be None. 
return False oth", "of the ndarray. This is always defined for numpy arrays.", "return super(YTArray, self).__gt__(oth) # # End comparison operators # #", "__mul__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rmul__(lo) def", "b) True \"\"\" _ufunc_registry = { add: preserve_units, subtract: preserve_units,", "This happens if you do ndarray * YTQuantity. Explicitly #", "then serialized by pickle. \"\"\" super(YTArray, self).__setstate__(state[1:]) try: unit, lut", "quantity which is not in the same dimensions. .. note::", "other is a YTArray. if other is None: # self", "is a YTArray and has the same dimensions as the", "zip(arrays, units)]) def savetxt(fname, arrays, fmt='%.18e', delimiter='\\t', header='', footer='', comments='#'):", "3., 4.]) cm \"\"\" v = np.union1d(arr1, arr2) v =", "def to_pint(self, unit_registry=None): \"\"\" Convert a YTArray or YTQuantity to", "to a Pint Quantity. Parameters ---------- arr : YTArray or", "**kwargs) def to_value(self, units=None, equivalence=None, **kwargs): \"\"\" Creates a copy", "ndarray * YTQuantity. Explicitly # casting to YTArray avoids creating", "comparison_unit, equal: comparison_unit, logical_and: comparison_unit, logical_or: comparison_unit, logical_xor: comparison_unit, logical_not:", "the default one will be used. Examples -------- >>> from", "sanitize_units_mul(this_object, other_object): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # If", "incomplete units header. Arrays will be \" \"dimensionless!\") units =", "unitful quantity, try the :meth:`list_equivalencies` method. 
Default: None Returns -------", "delimiter=delimiter, converters=None, unpack=True, usecols=usecols, ndmin=0) if usecols is not None:", "if units is None: v = self.value else: v =", "# # Start unit conversion methods # def convert_to_units(self, units):", "lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self, other):", "units. Parameters ---------- units : Unit object or str The", "installed, so you can't convert to \" + \"an AstroPy", ": str, optional The character used to indicate the start", "YTArray): ret = input_array.view(cls) if input_units is None: if registry", "if dot.shape == (): return YTQuantity(dot, units) return YTArray(dot, units)", "return False oth = validate_comparison_units(self, other, 'equal') return super(YTArray, self).__eq__(oth)", "-------- >>> a = yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit", ": float or dimensionless YTArray. The pow value. \"\"\" if", "oth = validate_comparison_units(self, other, 'equal') return super(YTArray, self).__eq__(oth) def __ne__(self,", "data.units) def udot(op1, op2): \"\"\"Matrix or vector dot product that", "quantity, try the :meth:`list_equivalencies` method. Default: None Returns ------- YTArray", "= get_inp_u_unary(ufunc, inputs, out_arr) unit = self._ufunc_registry[context[0]](u) ret_class = type(self)", "the right. \"\"\" oth = validate_comparison_units(self, other, 'less_than or equal')", "of the `*` operator. The unit objects handle being multiplied.", "ndarray subclass that attaches a symbolic unit object to the", "get away with it. if d.shape == self.shape and d.dtype", "not units[0].is_dimensionless: if units[0].dimensions == units[1].dimensions: out_arr = np.multiply(out_arr.view(np.ndarray), unit.base_value,", "if ``axis=-1`` it will be the last dimension. This is", "supported for this unitful quantity, try the :meth:`list_equivalencies` method. 
Examples", "bitop_units, invert: invert_units, left_shift: bitop_units, right_shift: bitop_units, greater: comparison_unit, greater_equal:", "= self.units**self.size return super(YTArray, self).prod(axis, dtype, out), units @return_arr def", "to. equiv : string The equivalence you wish to use.", "{'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... info=myinfo) \"\"\" from yt.utilities.on_demand_imports", "return float(v) else: return v def in_base(self, unit_system=\"cgs\"): \"\"\" Creates", "are passed to the equivalency, which should be used if", "the supplied units, and returns it. Optionally, an equivalence can", "which equivalencies are supported for this unitful quantity, try the", "this is less than the object on the right. \"\"\"", "are not defined for YTArray instances\") def bitop_units(unit1, unit2): raise", "\"\"\" Creates a copy of this array with the unit", "Default: '# ', as expected by e.g. ``yt.loadtxt``. Examples --------", "columns. The default, None, results in all columns being read.", "unit2 = getattr(inp2, 'units', None) ret_class = get_binary_op_return_class(type(inp1), type(inp2)) if", "base = \"CGS\" raise YTEquivalentDimsError(my_units, other_units, base) if not my_units.same_dimensions_as(other_units):", "a + b 201.0 cm >>> b + a 2.01", "in (comparison_unit, arctan2_unit): inps, units = handle_comparison_units( inps, units, ufunc,", "or string The units you want to get a new", "unit = unit_operator(*units) out_arr = func(np.asarray(inps[0]), np.asarray(inps[1]), out=out, **kwargs) if", "sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128, typed=False) def multiply_units(unit1, unit2): return unit1", ">>> a = sp[\"density\"] >>> b = sp[\"temperature\"] >>> c", "[] for base, exponent in zip(u.bases, u.powers): unit_str = base.to_string()", "or equal to the object on the right. 
\"\"\" oth", "YTArrays.\") a1 = arrs[0] if not all(a.units == a1.units for", "= ureg.Quantity(a, \"erg/cm**3\") >>> c = yt.YTArray.from_pint(b) \"\"\" p_units =", "if raise_error: raise YTUfuncUnitError(ufunc, *units) inps = (inps[0], ret_class(inps[1]).to( ret_class(inps[0]).units))", "float or dimensionless YTArray. The pow value. \"\"\" if isinstance(power,", "if any([ff != getattr(_, 'units', NULL_UNIT) for _ in input_object]):", "yt datasets: >>> import yt >>> ds = yt.load('IsolatedGalaxy/galaxy0030/galaxy0030') >>>", ">>> a = np.random.random(10) >>> b = ureg.Quantity(a, \"erg/cm**3\") >>>", "comments='#'): r\"\"\" Write YTArrays with unit information to a text", "strings, to mark them as comments. Default: '# ', as", "floor: passthrough_unit, ceil: passthrough_unit, trunc: passthrough_unit, spacing: passthrough_unit, positive: passthrough_unit,", "See the documentation for the standard library pickle module: http://docs.python.org/2/library/pickle.html", "equivalence_registry[equiv]() oneway_or_equivalent = ( conv_unit.has_equivalent(equiv) or this_equiv._one_way) if self.has_equivalent(equiv) and", "if len(words) == 0: continue if line[0] == comments: if", ">>> a = YTArray([1,2,3], 'cm') >>> myinfo = {'field':'dinosaurs', 'type':'field_data'}", "unit_str = base.to_string() # we have to do this because", "sympy issue (I think?) # # If I don't do", "__div__. \"\"\" lo = sanitize_units_mul(self, left_object) return super(YTArray, self).__rdiv__(lo) def", "a YTArray or YTQuantity to a Pint Quantity. Parameters ----------", "if inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret def sanitize_units_add(this_object, other_object, op_string): inp", "bypass_validation : boolean If True, all input validation is skipped.", "other is a YTArray. oth = validate_comparison_units( self, other, 'greater", "np.cross(arr1, arr2, axisa=axisa, axisb=axisb, axisc=axisc, axis=axis) units = arr1.units *", "tuple used to restore the state of the ndarray. 
This", "-------- >>> a = YTArray([1,2,3], 'cm') >>> myinfo = {'field':'dinosaurs',", "YTArray The array(s) to write to the file. fmt :", "> 1: raise RuntimeError(\"YTQuantity instances must be scalars\") return ret", "\"Quantity\" to a YTArray or YTQuantity. Parameters ---------- arr :", "sanitize_units_mul(self, other) np.true_divide(self, oth, out=self) return self def __floordiv__(self, right_object):", "inp, u def get_inp_u_binary(ufunc, inputs): inp1 = coerce_iterable_units(inputs[0]) inp2 =", "\"\"\" Test if this is greater than or equal to", "else: v = self.in_units(units, equivalence=equivalence, **kwargs).value if isinstance(self, YTQuantity): return", "to a text file. Parameters ---------- fname : str The", ": string The equivalence you wish to use. To see", "registry=registry) # Attach the units obj.units = units return obj", "less, less_equal, not_equal, equal, logical_and, \\ logical_or, logical_xor, logical_not, maximum,", "401., 502., 603.]) cm >>> b + a YTArray([ 4.01,", "registry=registry) ret.units = units elif isinstance(input_units, Unit): ret.units = input_units", "logical_or, logical_xor, logical_not, maximum, minimum, fmax, fmin, \\ isreal, iscomplex,", "list, tuple)): return cls2 if cls2 in (np.ndarray, np.matrix, np.ma.masked_array)", "Add this ytarray to the object on the right of", "inps, units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in (preserve_units,", "the equivalent cgs units. \"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self): \"\"\"", "arctan2, \\ hypot, sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad,", "norm that preserves units This is a wrapper around np.linalg.norm", "np.divide(self, oth, out=self) return self def __truediv__(self, right_object): ro =", "if any_nonzero[0] == np.bool_(False): units = (units[1], units[1]) elif any_nonzero[1]", "this quantity. Returns Unit object. \"\"\" # let Unit() handle", "with the same unit information. 
\"\"\" if _astropy.units is None:", "power): return unit**power @lru_cache(maxsize=128, typed=False) def square_unit(unit): return unit*unit @lru_cache(maxsize=128,", "equal: comparison_unit, logical_and: comparison_unit, logical_or: comparison_unit, logical_xor: comparison_unit, logical_not: return_without_unit,", "\"\"\"Matrix or vector norm that preserves units This is a", "arrs): return v if not all(isinstance(a, YTArray) for a in", "= validate_numpy_wrapper_units(v, arrs) return v def ustack(arrs, axis=0): \"\"\"Join a", "trunc, modf, frexp, fabs, spacing, positive, isnat, ) binary_operators =", "get_inp_u_unary(ufunc, inputs, out_arr) unit = self._ufunc_registry[context[0]](u) ret_class = type(self) elif", "unpack=True, usecols=usecols, ndmin=0) if usecols is not None: units =", "norm = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims) if norm.shape == ():", "keepdims=False): \"\"\"Matrix or vector norm that preserves units This is", "for a in arrs): return v if not all(isinstance(a, YTArray)", "None: unit1 = Unit(registry=getattr(unit2, 'registry', None)) if unit2 is None", "The name of the dataset to create in the file.", "restores the unit data from the metadata extracted in __reduce__", "Convert the array and units to the given units. Parameters", "maximum, minimum, fmax, fmin, \\ isreal, iscomplex, isfinite, isinf, isnan,", "return self def convert_to_base(self, unit_system=\"cgs\"): \"\"\" Convert the array and", "usecols=(1,2), delimiter=\"\\t\") \"\"\" f = open(fname, 'r') next_one = False", "if isinstance(self, YTQuantity): return float(v) else: return v def in_base(self,", "function for descriptions of the keyword arguments. 
The keepdims argument", "zip(u.bases, u.powers): unit_str = base.to_string() # we have to do", "self).prod(axis, dtype, out), units @return_arr def mean(self, axis=None, dtype=None, out=None):", "self).sum(axis, dtype, out), self.units @return_arr def std(self, axis=None, dtype=None, out=None,", "a copy of this array with the data in the", "= super(YTArray, self).__reduce__() obj_state = np_ret[2] unit_state = (((str(self.units), self.units.registry.lut),)", "invert, left_shift, right_shift, \\ greater, greater_equal, less, less_equal, not_equal, equal,", "return type(self)(ret, input_units='') return super(YTArray, self).__pow__(power) def __abs__(self): \"\"\" Return", "unit = u**(power_sign*inp.size) else: unit = self._ufunc_registry[ufunc](u) ret_class = type(self)", "YTEquivalentDimsError(my_units, other_units, base) if not my_units.same_dimensions_as(other_units): raise YTUnitConversionError( my_units, my_units.dimensions,", ">>> sp = ds.sphere(\"c\", (100,\"kpc\")) >>> a = sp[\"density\"] >>>", "delimiter='\\t', header='', footer='', comments='#'): r\"\"\" Write YTArrays with unit information", "be our class type obj = np.asarray(input_array, dtype=dtype).view(cls) # Check", ", 0.47712125, 0.60205999, 0.69897 , 0.77815125, 0.84509804]) YTArray is tightly", "power, remainder, mod, absolute, rint, \\ sign, conj, exp, exp2,", "comparison_unit, greater_equal: comparison_unit, less: comparison_unit, less_equal: comparison_unit, not_equal: comparison_unit, equal:", "(%s) and (%s)\" % (cls1, cls2)) def loadtxt(fname, dtype='float', delimiter='\\t',", "it would be annoying to deal with them. >>> np.log10(a)", "value will be returned in the current units. 
equivalence :", "as pickle if dataset_name is None: dataset_name = 'array_data' f", "\"\"\" return super(YTArray, self).__repr__()+' '+self.units.__repr__() def __str__(self): \"\"\" \"\"\" return", "logical_xor, maximum, minimum, fmax, fmin, copysign, nextafter, ldexp, fmod, divmod_,", "'': header += '\\n' header += \" Units\\n \" +", "string An optional group to read the arrays from. If", "optional String that will be prepended to the ``header`` and", "left_object): \"\"\" See __add__. \"\"\" lo = sanitize_units_add(self, left_object, \"addition\")", "h5py from yt.extern.six.moves import cPickle as pickle if dataset_name is", "astropy units The units of the array. Powers must be", "on the right. \"\"\" oth = validate_comparison_units(self, other, 'less_than or", "get_inp_u_binary(ufunc, inputs): inp1 = coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1]) unit1 =", "hypot, sinh, cosh, tanh, arcsinh, arccosh, arctanh, deg2rad, rad2deg, \\", "all columns being read. comments : str, optional The character", "np.multiply(out_arr.view(np.ndarray), unit.base_value, out=out) unit = Unit(registry=unit.registry) return out, out_arr, unit", "== other_units.dimensions: if current_mks in equiv_dims.free_symbols: base = \"SI\" else:", "filename: string The filename to create and write a dataset", "3]) g/cm**3 and strip them when it would be annoying", "ret = super(YTArray, self).__deepcopy__(memodict) return type(self)(ret, copy.deepcopy(self.units)) class YTQuantity(YTArray): \"\"\"", "add_default_symbols=False) return cls(data, units, registry=registry) # # Start convenience methods", "do ndarray * YTQuantity. Explicitly # casting to YTArray avoids", "np_ret[2] unit_state = (((str(self.units), self.units.registry.lut),) + obj_state[:],) new_ret = np_ret[:2]", "else: unit_lut = None f.close() registry = UnitRegistry(lut=unit_lut, add_default_symbols=False) return", "YTQuantity to a Pint Quantity. 
Parameters ---------- arr : YTArray", "Rational(exponent))) p_units = \"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray): return YTArray(arr.magnitude, p_units,", ">>> ureg = UnitRegistry() >>> a = np.random.random(10) >>> b", "This wrapper around numpy.concatenate preserves units. All input arrays must", "equivalency requires them. Parameters ---------- units : Unit object or", "positive: passthrough_unit, divmod_: passthrough_unit, isnat: return_without_unit, heaviside: preserve_units, } __array_priority__", "= np_ret[2] unit_state = (((str(self.units), self.units.registry.lut),) + obj_state[:],) new_ret =", "the state of the ndarray. This is always defined for", "dtype : data-type The dtype of the array data. Examples", "Unit() elif isinstance(input_units, Unit): if registry and registry is not", "unit object. dtype : data-type The dtype of the array", "arrs) return v def array_like_field(data, x, field): field = data._determine_fields(field)[0]", "2, 1, 0, 1, 2, 3]) g/cm**3 and strip them", "any_nonzero[0] == np.bool_(False): units = (units[1], units[1]) elif any_nonzero[1] ==", "want to get a new quantity in. equivalence : string,", "None: out_arr = ufunc(inp).view(np.ndarray) return out_arr, inp, u def get_inp_u_binary(ufunc,", "equiv_dims == other_units.dimensions: if current_mks in equiv_dims.free_symbols: base = \"SI\"", "def __floordiv__(self, right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__floordiv__(ro)", "ret_class(np.asarray(out_arr), unit) if out is not None: out_orig[0].flat[:] = out.flat[:]", "return new_ret def __setstate__(self, state): \"\"\"Pickle setstate method This is", "a = YTArray([1,2,3], 'cm') >>> myinfo = {'field':'dinosaurs', 'type':'field_data'} >>>", "---------- unit : string The unit that you wish to", "is necessary for stdlib deepcopy of arrays and quantities. 
\"\"\"", "out_arr = ufunc(inp).view(np.ndarray) return out_arr, inp, u def get_inp_u_binary(ufunc, inputs):", "to the equivalent cgs units. \"\"\" return self.convert_to_units(self.units.get_cgs_equivalent()) def convert_to_mks(self):", "\\n\" \"Perhaps you meant to do something like this instead:", "inps, units, ufunc, ret_class, raise_error=True) unit = unit_operator(*units) if unit_operator", "from yt.units.unit_registry import UnitRegistry from yt.units.dimensions import \\ angle, \\", "tan, arcsin, arccos, arctan, sinh, cosh, tanh, arcsinh, arccosh, arctanh,", "file. Parameters ---------- fname : str The file to write", "can be specified to convert to an equivalent quantity which", "2., 3., 4.]) cm \"\"\" v = np.concatenate(arrs, axis=axis) v", ", 0.77815125, 0.84509804]) YTArray is tightly integrated with yt datasets:", "are used. Examples -------- >>> E = YTQuantity(2.5, \"erg/s\") >>>", "YTQuantity with # size > 1 out_arr = YTArray(np.asarray(out_arr), unit)", ": str, optional String that will be prepended to the", "ret = ret.in_units(inp.units) else: # If the other object is", "rint, \\ sign, conj, exp, exp2, log, log2, log10, expm1,", "dataset. \\n\" \"Perhaps you meant to do something like this", "__ixor__(self, other): np.bitwise_xor(self, other, out=self) return self def __and__(self, right_object):", "bypass_validation=False): if not isinstance(input_scalar, (numeric_type, np.number, np.ndarray)): raise RuntimeError(\"YTQuantity values", "v = np.vstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def", ": str The file to write the YTArrays to. arrays", "the `units` # attribute. if isinstance(ret, YTArray): if not inp.units.same_dimensions_as(ret.units):", "@lru_cache(maxsize=128, typed=False) def _unit_repr_check_same(my_units, other_units): \"\"\" Takes a Unit object,", "the value will be returned in the current units. equivalence", "'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... 
info=myinfo) \"\"\" from yt.utilities.on_demand_imports import", "def __isub__(self, other): \"\"\" See __sub__. \"\"\" oth = sanitize_units_add(self,", "__rxor__(self, left_object): return super(YTArray, self).__rxor__(left_object) def __ixor__(self, other): np.bitwise_xor(self, other,", "return_without_unit, exp2: return_without_unit, log: return_without_unit, log2: return_without_unit, log10: return_without_unit, expm1:", "\"subtraction\") np.subtract(self, oth, out=self) return self def __neg__(self): \"\"\" Negate", "YTQuantity to an equivalent, e.g., something that is related by", "inps, units def handle_multiply_divide_units(unit, units, out, out_arr): if unit.is_dimensionless and", "an integer or floating point scalar The scalar to attach", "arg if it's not already a Unit obj. if not", "\"\"\" Convert a Pint \"Quantity\" to a YTArray or YTQuantity.", "return ret def __repr__(self): return str(self) def validate_numpy_wrapper_units(v, arrs): if", "units, ret_class = get_inp_u_binary(ufunc, inputs) if unit_operator in (preserve_units, comparison_unit,", "inputs) if unit_operator in (preserve_units, comparison_unit, arctan2_unit): inps, units =", "unit2): return NULL_UNIT def comparison_unit(unit1, unit2=None): return None def invert_units(unit):", "unit.is_dimensionless and unit.base_value != 1.0: if not units[0].is_dimensionless: if units[0].dimensions", "= ret.view(YTArray) if context is None: if ret.shape == ():", "be written at the end of the file. comments :", "expm1, log1p, sqrt, square, reciprocal, sin, cos, tan, arcsin, arccos,", "= arrs[0] if not all(a.units == a1.units for a in", "associated with a unit registry and this is specified, this", "heaviside ) trigonometric_operators = ( sin, cos, tan, ) class", "remainder: preserve_units, mod: preserve_units, fmod: preserve_units, absolute: passthrough_unit, fabs: passthrough_unit,", "the ndarray. This is always defined for numpy arrays. 
\"\"\"", "YTArray([ 2., 3.]) cm \"\"\" v = np.intersect1d(arr1, arr2, assume_unique=assume_unique)", "ret.view(np.ndarray) elif not np.any(this_object): return ret raise YTUnitOperationError(op_string, inp.units, ret.units)", "return ret # # Start operation methods # if LooseVersion(np.__version__)", "oth, out=self) return self def __neg__(self): \"\"\" Negate the data.", "same unit as this array and a value of 1.0\"\"\"", "or vector norm that preserves units This is a wrapper", "on the right of the `-` from this ytarray. Must", "with the abs of the data. \"\"\" return super(YTArray, self).__abs__()", "oth = sanitize_units_mul(self, other) np.floor_divide(self, oth, out=self) return self def", "return unit**-1 def passthrough_unit(unit, unit2=None): return unit def return_without_unit(unit, unit2=None):", "% (bs, Rational(exponent))) p_units = \"*\".join(p_units) if isinstance(arr.magnitude, np.ndarray): return", "of the `-` from this ytarray. Must check for the", "\"\"\"Join a sequence of arrays along a new axis while", "raise YTUnitConversionError( my_units, my_units.dimensions, other_units, other_units.dimensions) return other_units unary_operators =", "if this.units.expr is other.units.expr: if this.units.base_value == other.units.base_value: return other", "the first. For example, ``usecols = (1,4,5)`` will extract the", "the object on the right of the `/` operator. \"\"\"", "equiv_dims = em_dimensions.get(my_units.dimensions, None) if equiv_dims == other_units.dimensions: if current_mks", "a = yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit = Unit(unit,", "the data in the supplied units, and returns it. 
Optionally,", "%s ufunc has not been added \" \"to YTArray.\" %", "None: out_orig[0].flat[:] = out.flat[:] if isinstance(out_orig[0], YTArray): out_orig[0].units = unit", "remainder, mod, absolute, rint, \\ sign, conj, exp, exp2, log,", "input_array: if isinstance(input_array[0], YTArray): return YTArray(np.array(input_array, dtype=dtype), input_array[0].units, registry=registry) #", "YTArray([4, 5, 6], 'm') >>> a + b YTArray([ 401.,", "Unit): if registry and registry is not input_units.registry: units =", "optional String that will be written at the end of", "tuple((ret_class(o, unit) for o in out_arr)) elif out_arr.size == 1:", "= yt.loadtxt(\"sphere.dat\", usecols=(1,2), delimiter=\"\\t\") \"\"\" f = open(fname, 'r') next_one", "= get_inp_u_unary(ufunc, inputs) out_arr = func(np.asarray(inp), out=out, **kwargs) if ufunc", ") if isinstance(input_array, YTArray): ret = input_array.view(cls) if input_units is", "units) def get_binary_op_return_class(cls1, cls2): if cls1 is cls2: return cls1", "delimiter : str, optional The string used to separate values.", "---------- units : Unit object or string The units you", "kwargs and kwargs['axis'] is not None: unit = u**(power_sign*inp.shape[kwargs['axis']]) else:", "of known unit symbol, and check that it is compatible", "in_mks(self): \"\"\" Creates a copy of this array with the", "None) ret_class = get_binary_op_return_class(type(inp1), type(inp2)) if unit1 is None: unit1", "you want to convert to. \"\"\" new_units = _unit_repr_check_same(self.units, units)", "specifies the index of the new axis in the dimensions", "Rational(exponent))) ap_units = \"*\".join(ap_units) if isinstance(arr.value, np.ndarray): return YTArray(arr.value, ap_units,", "float. 
delimiter : str, optional The string used to separate", "versions, see # numpy issue #9081 return type(self)(super(YTArray, self).__pos__(), self.units)", "specified, the arrays are datasets at the top level by", "tuple)): return cls1 if issubclass(cls1, YTQuantity): return cls2 if issubclass(cls2,", "other_units, other_units.dimensions) return other_units unary_operators = ( negative, absolute, rint,", "return_without_unit, arccosh: return_without_unit, arctanh: return_without_unit, hypot: preserve_units, deg2rad: return_without_unit, rad2deg:", "out_arr) else: raise RuntimeError( \"Support for the %s ufunc with", "around np.linalg.norm that preserves units. See the documentation for that", "Parameters ---------- fname : str Filename to read. dtype :", "is None and isinstance(input_units, (str, bytes)): if input_units.startswith('code_'): raise UnitParseError(", "to cgs units. \"\"\" return self.in_units(self.units.get_cgs_equivalent()) def in_mks(self): \"\"\" Creates", "to write to the file. fmt : str or sequence", "len(words) == 0: continue if line[0] == comments: if next_one:", "Examples -------- >>> E = YTQuantity(2.5, \"erg/s\") >>> E_new =", "return super(YTArray, self).std(axis, dtype, out, ddof), self.units def __array_wrap__(self, out_arr,", "YTArray instances\") def get_inp_u_unary(ufunc, inputs, out_arr=None): inp = inputs[0] u", "to deal with them. >>> print(np.log10(a)) 1.07918124605 YTQuantity is tightly", "return super(YTArray, self).__ge__(oth) def __gt__(self, other): \"\"\" Test if this", "with the data in the equivalent cgs units, and returns", "YTArray.__new__(cls, input_scalar, input_units, registry, dtype=dtype, bypass_validation=bypass_validation) if ret.size > 1:", "delimiter=\"\\t\") \"\"\" f = open(fname, 'r') next_one = False units", "the `-` from this ytarray. Must check for the correct", "arrays are datasets at the top level by default. 
Examples", "sign: return_without_unit, conj: passthrough_unit, exp: return_without_unit, exp2: return_without_unit, log: return_without_unit,", "is in the file COPYING.txt, distributed with this software. #-----------------------------------------------------------------------------", "(): return ret[()] else: return ret ufunc = context[0] inputs", "units) return wrapped @lru_cache(maxsize=128, typed=False) def sqrt_unit(unit): return unit**0.5 @lru_cache(maxsize=128,", "other object is a YTArray and has the same dimensions", "inp.units.same_dimensions_as(ret.units): # handle special case of adding or subtracting with", "axis=0): \"\"\"Join a sequence of arrays along a new axis", "cgs units, and returns it. Returns ------- Quantity object with", "table # into the pickle file unit, lut = str(state[0]),", "with 0 being the first. For example, ``usecols = (1,4,5)``", "__array_priority__ = 2.0 def __new__(cls, input_array, input_units=None, registry=None, dtype=None, bypass_validation=False):", "op_string): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # Make sure", "Unit obj. if not isinstance(other_units, Unit): other_units = Unit(other_units, registry=my_units.registry)", "two YT arrays. This wrapper around numpy.cross preserves units. See", "the text file must have the same number of values.", "Unit object. 
if self.units.is_dimensionless and power == -1: ret =", "validate_numpy_wrapper_units(v, arrs) return v def ucross(arr1, arr2, registry=None, axisa=-1, axisb=-1,", "def bitop_units(unit1, unit2): raise TypeError( \"Bit-twiddling operators are not defined", "input_units, registry, dtype=dtype, bypass_validation=bypass_validation) if ret.size > 1: raise RuntimeError(\"YTQuantity", "pickle if info is None: info = {} info['units'] =", "None: units = self.units**self.shape[axis] else: units = self.units**self.size return super(YTArray,", "super(YTArray, self).dot(b), self.units*b.units def __reduce__(self): \"\"\"Pickle reduction method See the", "unit) return ret_class(np.array(out_arr, copy=False), unit) else: # numpy version equal", "this will be used instead of the registry associated with", "(cls1, cls2)) def loadtxt(fname, dtype='float', delimiter='\\t', usecols=None, comments='#'): r\"\"\" Load", "fmax, fmin, copysign, nextafter, ldexp, fmod, divmod_, heaviside ) trigonometric_operators", "my_units.dimensions, other_units, other_units.dimensions) return other_units unary_operators = ( negative, absolute,", "units This is a wrapper around np.hstack that preserves units.", "\\ reciprocal, sin, cos, tan, arcsin, arccos, arctan, arctan2, \\", "unit1 is None: unit1 = Unit(registry=getattr(unit2, 'registry', None)) if unit2", "context=None): ret = super(YTArray, self).__array_wrap__(out_arr, context) if isinstance(ret, YTQuantity) and", "em_dimensions.get(my_units.dimensions, None) if equiv_dims == other_units.dimensions: if current_mks in equiv_dims.free_symbols:", "units of the array. Powers must be specified using python", "cast to be our class type obj = np.asarray(input_array, dtype=dtype).view(cls)", "arrs[0] if not all(a.units == a1.units for a in arrs[1:]):", "attribute, attempt to infer units as well. group_name: string An", "super(YTArray, self).__radd__(lo) def __iadd__(self, other): \"\"\" See __add__. \"\"\" oth", "comparison operators. 
# def __lt__(self, other): \"\"\" Test if this", "array data.\"\"\" return self.ndarray_view() d = ndview @property def unit_quantity(self):", "\"\"\" A scalar associated with a unit. Parameters ---------- input_scalar", "passthrough_unit, divmod_: passthrough_unit, isnat: return_without_unit, heaviside: preserve_units, } __array_priority__ =", "of the keyword arguments. The keepdims argument is ignored if", "info['unit_registry'] = np.void(pickle.dumps(self.units.registry.lut)) if dataset_name is None: dataset_name = 'array_data'", "that the other is a YTArray. oth = validate_comparison_units( self,", "def _unit_repr_check_same(my_units, other_units): \"\"\" Takes a Unit object, or string", "a Unit object, or string of known unit symbol, and", "= super(YTArray, self).__array_wrap__(out_arr, context) if isinstance(ret, YTQuantity) and ret.shape !=", "if you do ndarray * YTQuantity. Explicitly # casting to", "words = line.strip().split() if len(words) == 0: continue if line[0]", "in sequence horizontally (column wise) while preserving units This is", "of two arrays. A wrapper around numpy.intersect1d that preserves units.", "return YTQuantity(arr.magnitude, p_units, registry=unit_registry) def to_pint(self, unit_registry=None): \"\"\" Convert a", "import from yt.funcs def iterable(obj): try: len(obj) except: return False", "validate_comparison_units(self, other, 'not equal') return super(YTArray, self).__ne__(oth) def __ge__(self, other):", "need to fix up the lut if the pickle was", "if offset: np.subtract(new_array, offset*new_array.uq, new_array) return new_array else: return self.to_equivalent(units,", "= YTQuantity(1, 'cm') >>> b = YTQuantity(2, 'm') >>> a", "a wrapper around np.hstack that preserves units. \"\"\" v =", "all input validation is skipped. 
Using this option may produce", "Creates a copy of this array with the unit information", "(): ret = ret.view(YTArray) if context is None: if ret.shape", "= self.value else: v = self.in_units(units, equivalence=equivalence, **kwargs).value if isinstance(self,", "with the same unit and shape as this array\"\"\" return", "return super(YTArray, self).__mul__(ro) def __rmul__(self, left_object): \"\"\" See __mul__. \"\"\"", ">>> A = yt.YTArray([1, 2, 3], 'cm') >>> B =", "dataset_name=None, info=None, group_name=None): r\"\"\"Writes a YTArray to hdf5 file. Parameters", "this option may produce corrupted, invalid units or array data,", "= validate_numpy_wrapper_units(v, arrs) return v def ucross(arr1, arr2, registry=None, axisa=-1,", "YTQuantity(arr.magnitude, p_units, registry=unit_registry) def to_pint(self, unit_registry=None): \"\"\" Convert a YTArray", "'array_data' f = h5py.File(filename) if group_name is not None: if", "reciprocal_unit, sin: return_without_unit, cos: return_without_unit, tan: return_without_unit, sinh: return_without_unit, cosh:", "bitwise_or: bitop_units, bitwise_xor: bitop_units, invert: invert_units, left_shift: bitop_units, right_shift: bitop_units,", "modf, ldexp, frexp, fmod, floor, ceil, trunc, fabs, spacing try:", ">>> c = sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c], header='My sphere stuff',", "must be specified using python syntax (cm**3, not cm^3). registry", "dataset. group_name: string An optional group to write the arrays", "return_without_unit, log1p: return_without_unit, sqrt: sqrt_unit, square: square_unit, reciprocal: reciprocal_unit, sin:", "the object on the right. \"\"\" # converts if possible", "to create in the file. 
info: dictionary A dictionary of", "def handle_multiply_divide_units(unit, units, out, out_arr): if unit.is_dimensionless and unit.base_value !=", "super(YTArray, self).__ror__(left_object) def __ior__(self, other): np.bitwise_or(self, other, out=self) return self", "the specified unit system, and returns it in that system's", "when the pickle format changed if len(lut['m']) == 2: lut.update(default_unit_symbol_lut)", "def convert_to_units(self, units): \"\"\" Convert the array and units to", "equivalence=equivalence, **kwargs) def to_value(self, units=None, equivalence=None, **kwargs): \"\"\" Creates a", "a 2.01 m NumPy ufuncs will pass through units where", "if len(v) == 2]: lut[k] = v + (0.0, r'\\rm{'", "this array with the data in the equivalent cgs units,", "for a in arrs[1:]): raise RuntimeError(\"Your arrays must have identical", "at the top level by default. Examples -------- >>> a", "quantity.\") return self.value*_astropy.units.Unit(str(self.units), **kwargs) @classmethod def from_pint(cls, arr, unit_registry=None): \"\"\"", "# # End unit conversion methods # def write_hdf5(self, filename,", "mks units. \"\"\" return self.convert_to_units(self.units.get_mks_equivalent()) def in_units(self, units, equivalence=None, **kwargs):", "invert: invert_units, left_shift: bitop_units, right_shift: bitop_units, greater: comparison_unit, greater_equal: comparison_unit,", "= (1,4,5)`` will extract the 2nd, 5th and 6th columns.", "this case happens when we try to load an old", ">>> np.abs(a) 12 g/cm**3 and strip them when it would", "array and units to the given units. 
Parameters ---------- units", "arr, unit_registry=None): \"\"\" Convert a Pint \"Quantity\" to a YTArray", "return (inp1, inp2), (unit1, unit2), ret_class def handle_preserve_units(inps, units, ufunc,", "len(obj) except: return False return True def return_arr(func): @wraps(func) def", "exponent in zip(u.bases, u.powers): unit_str = base.to_string() # we have", "corrupted, invalid units or array data, but can lead to", "to False. Examples -------- >>> from yt import YTArray >>>", "functools import wraps from numpy import \\ add, subtract, multiply,", "units = dataset.attrs.get('units', '') if 'unit_registry' in dataset.attrs.keys(): unit_lut =", "ndmin=0) if usecols is not None: units = [units[col] for", "self).__rdiv__(lo) def __idiv__(self, other): \"\"\" See __div__. \"\"\" oth =", "self.units**self.shape[axis] else: units = self.units**self.size return super(YTArray, self).prod(axis, dtype, out),", "horizontally (column wise) while preserving units This is a wrapper", "get a new quantity in. equivalence : string, optional The", "== np.bool_(False): units = (units[0], units[0]) elif not any([u1d, u2d]):", ", 0.30103 , 0.47712125, 0.60205999, 0.69897 , 0.77815125, 0.84509804]) YTArray", "bitop_units, greater: comparison_unit, greater_equal: comparison_unit, less: comparison_unit, less_equal: comparison_unit, not_equal:", "be returned in the current units. equivalence : string, optional", "b = YTArray(np.ones(5), 'code_length', registry=ds.unit_registry) >>> np.all(a == b) True", "being read. comments : str, optional The character used to", "YTArray. if other is None: # self is a YTArray,", "quantity, try the :meth:`list_equivalencies` method. 
Examples -------- >>> a =", "YTIterableUnitCoercionError(input_object) # This will create a copy of the data", "other_object, op_string): inp = coerce_iterable_units(this_object) ret = coerce_iterable_units(other_object) # Make", "ua = unit_array def __getitem__(self, item): ret = super(YTArray, self).__getitem__(item)", "if input_units is None: # Nothing provided. Make dimensionless... units", "RuntimeError(\"Not all of your arrays are YTArrays.\") a1 = arrs[0]", "= np.vstack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v def uhstack(arrs):", "a comment; default: '#'. Examples -------- >>> temp, velx =", "ret def validate_comparison_units(this, other, op_string): # Check that other is", "YTQuantity(1.0, self.units) uq = unit_quantity @property def unit_array(self): \"\"\"Get a", "str Filename to read. dtype : data-type, optional Data-type of", "YTArray. oth = validate_comparison_units(self, other, 'greater than') return super(YTArray, self).__gt__(oth)", "that particular equivalency requires them. Parameters ---------- units : Unit", ">>> a = yt.YTArray(1.0e7,\"K\") >>> a.to_equivalent(\"keV\", \"thermal\") \"\"\" conv_unit =", "registry=registry) # Input array is an already formed ndarray instance", ">>> np.abs(a) YTArray([4, 3, 2, 1, 0, 1, 2, 3])", "numpy.concatenate preserves units. All input arrays must have the same", "the file, before the unit header. footer : str, optional", "def get_inp_u_unary(ufunc, inputs, out_arr=None): inp = inputs[0] u = getattr(inp,", "oth = validate_comparison_units(self, other, 'less_than or equal') return super(YTArray, self).__le__(oth)", "and 6th columns. The default, None, results in all columns", "ret = ret.view(YTArray) if context is None: if ret.shape ==", "units. \"\"\" ro = sanitize_units_add(self, right_object, \"addition\") return super(YTArray, self).__add__(ro)", "return cls1 if issubclass(cls1, cls2): return cls1 if issubclass(cls2, cls1):", "\"\"\" See __div__. 
\"\"\" oth = sanitize_units_mul(self, other) np.floor_divide(self, oth,", "input_scalar : an integer or floating point scalar The scalar", "information to a text file. Parameters ---------- fname : str", "def uvstack(arrs): \"\"\"Stack arrays in sequence vertically (row wise) while", "the end of the file. comments : str, optional String", "registry=unit_registry) def to_pint(self, unit_registry=None): \"\"\" Convert a YTArray or YTQuantity", "( negative, absolute, rint, sign, conj, exp, exp2, log, log2,", "myinfo = {'field':'dinosaurs', 'type':'field_data'} >>> a.write_hdf5('test_array_data.h5', dataset_name='dinosaurs', ... info=myinfo) \"\"\"", "or dimensionless YTArray. The pow value. \"\"\" if isinstance(power, YTArray):", "so don't call YTArray directly. return type(args[0])(ret, units) return wrapped", "YTArray(norm, data.units) def udot(op1, op2): \"\"\"Matrix or vector dot product", "hasattr(other, 'units'): if this.units.expr is other.units.expr: if this.units.base_value == other.units.base_value:", "in arrs[1:]): raise RuntimeError(\"Your arrays must have identical units.\") v.units", "ret = super(YTArray, self).__pow__(power) return type(self)(ret, input_units='') return super(YTArray, self).__pow__(power)", "alias for YTArray.in_units(). See the docstrings of that function for", "__array_wrap__(self, out_arr, context=None): ret = super(YTArray, self).__array_wrap__(out_arr, context) if isinstance(ret,", "# Check that other is a YTArray. if other is", "this ytarray. Must check for the correct (same dimension) units.", "as attributes to the dataset. group_name: string An optional group", "heaviside except ImportError: positive, divmod_, isnat, heaviside = (None,)*4 from", "header != '': header += '\\n' header += \" Units\\n", "return super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self, other): \"\"\" See __div__. \"\"\"", "attribute. 
if isinstance(ret, YTArray): if not inp.units.same_dimensions_as(ret.units): # handle special", "op1.units*op2.units if dot.shape == (): return YTQuantity(dot, units) return YTArray(dot,", "ytLogger as mylog from .pint_conversions import convert_pint_units NULL_UNIT = Unit()", "the equivalent base units in the specified unit system. Parameters", "= yt.YTArray([2, 3, 4], 'cm') >>> uconcatenate((A, B)) YTArray([ 1.,", "units) (conversion_factor, offset) = self.units.get_conversion_factor(new_units) self.units = new_units values =", "with this software. #----------------------------------------------------------------------------- import copy import numpy as np", "b = sp[\"temperature\"] >>> c = sp[\"velocity_x\"] >>> yt.savetxt(\"sphere.dat\", [a,b,c],", "this is less than or equal to the object on", "None)) elif ufunc is power: unit2 = inp2 if isinstance(unit2,", "deal with them. >>> print(np.log10(a)) 1.07918124605 YTQuantity is tightly integrated", "Quantity The Quantity to convert from. unit_registry : yt UnitRegistry,", "scalar to attach units to input_units : String unit specification,", "B = yt.YTArray([2, 3, 4], 'cm') >>> uintersect1d(A, B) YTArray([", "return NULL_UNIT def comparison_unit(unit1, unit2=None): return None def invert_units(unit): raise", "by the object on the right of the `*` operator.", "unit_operator is preserve_units: inps, units = handle_preserve_units( inps, units, ufunc,", "units)]) def savetxt(fname, arrays, fmt='%.18e', delimiter='\\t', header='', footer='', comments='#'): r\"\"\"", "= POWER_SIGN_MAPPING[ufunc] if 'axis' in kwargs and kwargs['axis'] is not", "must have the same units. See the documentation of numpy.intersect1d", "if this YTArray or YTQuantity has an equivalent unit in", "quantity in. If not specified, the value will be returned", "constant factor but not in the same units. 
Parameters ----------", "converts if possible oth = validate_comparison_units(self, other, 'less_than') return super(YTArray,", "that preserves units. \"\"\" v = np.vstack(arrs) v = validate_numpy_wrapper_units(v,", "with a unit registry and this is specified, this will", "Optionally, an equivalence can be specified to convert to an", "mylog.warning(\"Malformed or incomplete units header. Arrays will be \" \"dimensionless!\")", "Unit(input_units, registry=registry) # Attach the units obj.units = units return", "the file. comments : str, optional String that will be", "else: # numpy version equal to or newer than 1.13", "The units you want to get the bare quantity in.", "left_object, \"addition\") return super(YTArray, self).__radd__(lo) def __iadd__(self, other): \"\"\" See", "of the `+` operator. Must check for the correct (same", "cls2 else: raise RuntimeError(\"Undefined operation for a YTArray subclass. \"", "Examples -------- >>> from yt import YTArray >>> a =", "dimension) units. \"\"\" ro = sanitize_units_add(self, right_object, \"subtraction\") return super(YTArray,", "inp.units.same_dimensions_as(ret.units): ret.in_units(inp.units) return ret def sanitize_units_add(this_object, other_object, op_string): inp =", "= np.asarray(out_orig[0]) else: out = None if len(inputs) == 1:", "if header != '': header += '\\n' header += \"", "isinstance(x, np.ndarray): return data.ds.arr(x, units) else: return data.ds.quan(x, units) def", "\"\"\" v = np.stack(arrs) v = validate_numpy_wrapper_units(v, arrs) return v", "TypeError: # this case happens when we try to load", "End unit conversion methods # def write_hdf5(self, filename, dataset_name=None, info=None,", "yt.YTArray([2, 3, 4], 'cm') >>> uunion1d(A, B) YTArray([ 1., 2.,", "obj is None and hasattr(self, 'units'): return self.units = getattr(obj,", "== b) True \"\"\" def __new__(cls, input_scalar, input_units=None, registry=None, dtype=np.float64,", "optional String or character separating columns. 
header : str, optional", "ds.sphere(\"c\", (100,\"kpc\")) >>> a = sp[\"density\"] >>> b = sp[\"temperature\"]", "frexp: return_without_unit, floor: passthrough_unit, ceil: passthrough_unit, trunc: passthrough_unit, spacing: passthrough_unit,", "the array and units to the given units. Parameters ----------", "== \"Units\": next_one = True else: # Here we catch", "not specified, the arrays are datasets at the top level", "from_astropy(cls, arr, unit_registry=None): \"\"\" Convert an AstroPy \"Quantity\" to a", "self def __truediv__(self, right_object): ro = sanitize_units_mul(self, right_object) return super(YTArray,", "copy=False) return out_arr out_arr.units = unit if out_arr.size == 1:", "is equivalent to: >>> b = YTQuantity(5, 'code_length', registry=ds.unit_registry) >>>", "# End unit conversion methods # def write_hdf5(self, filename, dataset_name=None,", "\"\"\" ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__div__(ro) def __rdiv__(self,", "not been\" \"added to YTArray.\" % (str(ufunc), len(inputs))) if unit", "prior to PR #1728 # when the pickle format changed", "before we serialized the unit symbol lookup table # into", "Input array is an already formed ndarray instance # We", "a YTArray. Parameters ---------- filename: string The filename to of", "YTArray([ 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24, 3.08600000e+24]) cm This is equivalent", ">>> from yt import YTQuantity >>> a = YTQuantity(1, 'cm')", "1.13 or newer from numpy import positive, divmod as divmod_,", "getattr(_, 'units', NULL_UNIT) for _ in input_object]): raise YTIterableUnitCoercionError(input_object) #", "unit) else: out_arr = ret_class(np.asarray(out_arr), unit) if out is not", "that the other is a YTArray. 
if other is None:", "units = [\"dimensionless\"]*num_cols arrays = np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter, converters=None,", "to the object on the right of the `+` operator.", "units = [] for array in arrays: if hasattr(array, \"units\"):", "YTUnitOperationError(op_string, this.units, other.units) return other.in_units(this.units) return other @lru_cache(maxsize=128, typed=False) def", "yt.extern.six.moves import cPickle as pickle if info is None: info", "units This is a wrapper around np.dot that preserves units.", "exp2: return_without_unit, log: return_without_unit, log2: return_without_unit, log10: return_without_unit, expm1: return_without_unit,", "unit = str(unit).replace(\"yr\",\"year\") units.append(\"%s**(%s)\" % (unit, Rational(pow))) units = \"*\".join(units)", "numpy.intersect1d for full details. Examples -------- >>> A = yt.YTArray([1,", "def __imul__(self, other): \"\"\" See __mul__. \"\"\" oth = sanitize_units_mul(self,", "ro = sanitize_units_mul(self, right_object) return super(YTArray, self).__floordiv__(ro) def __rfloordiv__(self, left_object):", "2, 3]) g/cm**3 and strip them when it would be", "ndview @property def unit_quantity(self): \"\"\"Get a YTQuantity with the same", "fmod, floor, ceil, trunc, fabs, spacing try: # numpy 1.13", "The filename to create and write a dataset to dataset_name:", ": Unit object or str The units you want to", "unit) else: return new_arr.in_units(unit) else: raise YTInvalidUnitEquivalence(equiv, self.units, unit) def", "Parameters ---------- filename: string The filename to create and write", "read from. If the dataset has a units attribute, attempt", "(cm**3, not cm^3). 
registry : ~yt.units.unit_registry.UnitRegistry The registry to create", "self).__repr__()+' '+self.units.__repr__() def __str__(self): \"\"\" \"\"\" return str(self.view(np.ndarray)) + '", "'registry', None)) elif ufunc is power: unit2 = inp2 if", "\"\"\" if equivalence is None: new_units = _unit_repr_check_same(self.units, units) (conversion_factor,", "dtype : data-type, optional Data-type of the resulting array; default:", "_ in input_object]): raise YTIterableUnitCoercionError(input_object) # This will create a", "return YTArray(np.array(out_arr), unit) return ret_class(np.array(out_arr, copy=False), unit) else: # numpy", "= this_equiv.convert( self, conv_unit.dimensions, **kwargs) if isinstance(new_arr, tuple): try: return", "This is a wrapper around np.linalg.norm that preserves units. See", "handle all the cases here, let the Unit class handle", "sanitize_units_mul(self, left_object) return super(YTArray, self).__rfloordiv__(lo) def __ifloordiv__(self, other): \"\"\" See", "r\"\"\"Writes a YTArray to hdf5 file. Parameters ---------- filename: string", "If True, all input validation is skipped. Using this option", "A UnitRegistry object The registry to create units from. If", "is not None: obj.units.registry = registry return obj if input_array", "np.ndarray): if isinstance(unit2, YTArray): if unit2.units.is_dimensionless: pass else: raise YTUnitOperationError(ufunc,", "data=self) else: d = g.create_dataset(dataset_name, data=self) for k, v in", "np.asarray(input_array, dtype=dtype).view(cls) obj.units = input_units if registry is not None:", "passthrough_unit, positive: passthrough_unit, divmod_: passthrough_unit, isnat: return_without_unit, heaviside: preserve_units, }", "return cls2 else: raise RuntimeError(\"Undefined operation for a YTArray subclass.", "A yt unit registry to use in the conversion. 
If", "preserve_units, multiply: multiply_units, divide: divide_units, logaddexp: return_without_unit, logaddexp2: return_without_unit, true_divide:", "6.03]) m NumPy ufuncs will pass through units where appropriate.", "convenience methods # @property def value(self): \"\"\"Get a copy of", "numpy import positive, divmod as divmod_, isnat, heaviside except ImportError:", "sure the other object is a YTArray before we use", "Unit object. # don't handle all the cases here, let", "B)) YTArray([ 1., 2., 3., 2., 3., 4.]) cm \"\"\"", "d[...] = self for k in d.attrs.keys(): del d.attrs[k] else:", "optional The string used to separate values. By default, this", "return super(YTArray, self).dot(b), self.units*b.units def __reduce__(self): \"\"\"Pickle reduction method See", "\"dimensionless!\") units = [\"dimensionless\"]*num_cols arrays = np.loadtxt(fname, dtype=dtype, comments=comments, delimiter=delimiter,", "Quantity. Parameters ---------- arr : YTArray or YTQuantity The unitful", "inputs): inp1 = coerce_iterable_units(inputs[0]) inp2 = coerce_iterable_units(inputs[1]) unit1 = getattr(inp1,", "not None: unit = u**(power_sign*inp.shape[kwargs['axis']]) else: unit = u**(power_sign*inp.size) else:", "An optional group to read the arrays from. If not", "np.linalg.norm(data, ord=ord, axis=axis) else: norm = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims)" ]
[ "= serializers.SerializerMethodField() class Meta: model = Post fields = [", "image class PostListSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField() class", "'html', ] def get_html(self, obj): return obj.get_markdown() def get_user(self, obj):", "except: image = None return image class PostListSerializer(serializers.ModelSerializer): url =", "user = serializers.SerializerMethodField() image = serializers.SerializerMethodField() html = serializers.SerializerMethodField() class", "class PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField() image =", "PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model = Post fields = [ #'id',", "import serializers from posts.models import Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta:", "'publish', 'user', 'image', 'html', ] def get_html(self, obj): return obj.get_markdown()", "Meta: model = Post fields = [ 'url', 'user', 'title',", "def get_image(self, obj): try: image = obj.image.url except: image =", "'url', 'user', 'title', 'content', 'publish', ] def get_user(self, obj): return", "return image class PostListSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField()", "posts.models import Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model = Post", "[ 'url', 'id', 'title', 'slug', 'content', 'publish', 'user', 'image', 'html',", "[ 'url', 'user', 'title', 'content', 'publish', ] def get_user(self, obj):", "post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer): url =", "class Meta: model = Post fields = [ #'id', 'title',", "post_detail_url user = serializers.SerializerMethodField() image = serializers.SerializerMethodField() html = serializers.SerializerMethodField()", 
"serializers.SerializerMethodField() html = serializers.SerializerMethodField() class Meta: model = Post fields", "serializers.SerializerMethodField() class Meta: model = Post fields = [ 'url',", "'content', 'publish', 'user', 'image', 'html', ] def get_html(self, obj): return", "#'slug', 'content', 'publish', ] post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', )", "] def get_html(self, obj): return obj.get_markdown() def get_user(self, obj): return", "image = serializers.SerializerMethodField() html = serializers.SerializerMethodField() class Meta: model =", "serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url user", "from rest_framework import serializers from posts.models import Post class PostCreateUpdateSerializer(serializers.ModelSerializer):", "'title', #'slug', 'content', 'publish', ] post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug',", "def get_html(self, obj): return obj.get_markdown() def get_user(self, obj): return str(obj.user.username)", "get_html(self, obj): return obj.get_markdown() def get_user(self, obj): return str(obj.user.username) def", "return obj.get_markdown() def get_user(self, obj): return str(obj.user.username) def get_image(self, obj):", "get_image(self, obj): try: image = obj.image.url except: image = None", "obj.image.url except: image = None return image class PostListSerializer(serializers.ModelSerializer): url", "try: image = obj.image.url except: image = None return image", "PostListSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField() class Meta: model", "#'id', 'title', #'slug', 'content', 'publish', ] post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail',", "get_user(self, obj): return str(obj.user.username) def 
get_image(self, obj): try: image =", "'image', 'html', ] def get_html(self, obj): return obj.get_markdown() def get_user(self,", "obj): return str(obj.user.username) def get_image(self, obj): try: image = obj.image.url", "'title', 'slug', 'content', 'publish', 'user', 'image', 'html', ] def get_html(self,", "image = obj.image.url except: image = None return image class", "post_detail_url user = serializers.SerializerMethodField() class Meta: model = Post fields", "rest_framework import serializers from posts.models import Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class", "Post fields = [ 'url', 'user', 'title', 'content', 'publish', ]", "= serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url", "= obj.image.url except: image = None return image class PostListSerializer(serializers.ModelSerializer):", "class PostListSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField() class Meta:", "str(obj.user.username) def get_image(self, obj): try: image = obj.image.url except: image", "= [ #'id', 'title', #'slug', 'content', 'publish', ] post_detail_url =", "= serializers.SerializerMethodField() html = serializers.SerializerMethodField() class Meta: model = Post", "user = serializers.SerializerMethodField() class Meta: model = Post fields =", "'user', 'title', 'content', 'publish', ] def get_user(self, obj): return str(obj.user.username)", "obj): return obj.get_markdown() def get_user(self, obj): return str(obj.user.username) def get_image(self,", "import Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model = Post fields", "serializers.SerializerMethodField() image = serializers.SerializerMethodField() html = serializers.SerializerMethodField() class Meta: model", "Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model = 
Post fields =", "<gh_stars>0 from rest_framework import serializers from posts.models import Post class", "class Meta: model = Post fields = [ 'url', 'id',", "lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField()", "html = serializers.SerializerMethodField() class Meta: model = Post fields =", "Post fields = [ 'url', 'id', 'title', 'slug', 'content', 'publish',", "'user', 'image', 'html', ] def get_html(self, obj): return obj.get_markdown() def", "= [ 'url', 'user', 'title', 'content', 'publish', ] def get_user(self,", "class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model = Post fields = [", "'content', 'publish', ] post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', ) class", "url = post_detail_url user = serializers.SerializerMethodField() class Meta: model =", "Meta: model = Post fields = [ #'id', 'title', #'slug',", "'slug', 'content', 'publish', 'user', 'image', 'html', ] def get_html(self, obj):", "= None return image class PostListSerializer(serializers.ModelSerializer): url = post_detail_url user", "'url', 'id', 'title', 'slug', 'content', 'publish', 'user', 'image', 'html', ]", "url = post_detail_url user = serializers.SerializerMethodField() image = serializers.SerializerMethodField() html", "= Post fields = [ 'url', 'user', 'title', 'content', 'publish',", "from posts.models import Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model =", "model = Post fields = [ #'id', 'title', #'slug', 'content',", "= post_detail_url user = serializers.SerializerMethodField() class Meta: model = Post", "= Post fields = [ 'url', 'id', 'title', 'slug', 'content',", "fields = [ #'id', 'title', #'slug', 'content', 'publish', ] post_detail_url", "def get_user(self, obj): return str(obj.user.username) def get_image(self, obj): try: image", "= Post fields = [ #'id', 
'title', #'slug', 'content', 'publish',", "'id', 'title', 'slug', 'content', 'publish', 'user', 'image', 'html', ] def", "= post_detail_url user = serializers.SerializerMethodField() image = serializers.SerializerMethodField() html =", "'publish', ] post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer):", "None return image class PostListSerializer(serializers.ModelSerializer): url = post_detail_url user =", "model = Post fields = [ 'url', 'user', 'title', 'content',", "Meta: model = Post fields = [ 'url', 'id', 'title',", ") class PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField() image", "= serializers.SerializerMethodField() image = serializers.SerializerMethodField() html = serializers.SerializerMethodField() class Meta:", "class Meta: model = Post fields = [ 'url', 'user',", "fields = [ 'url', 'id', 'title', 'slug', 'content', 'publish', 'user',", "obj): try: image = obj.image.url except: image = None return", "return str(obj.user.username) def get_image(self, obj): try: image = obj.image.url except:", "model = Post fields = [ 'url', 'id', 'title', 'slug',", "PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url user = serializers.SerializerMethodField() image = serializers.SerializerMethodField()", "fields = [ 'url', 'user', 'title', 'content', 'publish', ] def", "view_name='posts-api:detail', lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer): url = post_detail_url user =", "] post_detail_url = serializers.HyperlinkedIdentityField( view_name='posts-api:detail', lookup_field='slug', ) class PostDetailSerializer(serializers.ModelSerializer): url", "Post fields = [ #'id', 'title', #'slug', 'content', 'publish', ]", "image = None return image class PostListSerializer(serializers.ModelSerializer): url = post_detail_url", "obj.get_markdown() def 
get_user(self, obj): return str(obj.user.username) def get_image(self, obj): try:", "= [ 'url', 'id', 'title', 'slug', 'content', 'publish', 'user', 'image',", "serializers from posts.models import Post class PostCreateUpdateSerializer(serializers.ModelSerializer): class Meta: model", "[ #'id', 'title', #'slug', 'content', 'publish', ] post_detail_url = serializers.HyperlinkedIdentityField(" ]
[ "grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown() if __name__", "import unittest class GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper = Webapp()", "\"T1\", \"D MG 01 \") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo", "contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown()", "MG 01 \") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\")", "grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar')", "formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\",", "from tir import Webapp import unittest class GTPA036E(unittest.TestCase): @classmethod def", "self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown() if __name__ == '__main__':", "inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020')", "01 \") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar')", 
"self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2,", "grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def", "match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir')", "def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE',", "Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D MG 01 \") inst.oHelper.Program('GTPA036') def", "\"05/08/2020\", \"T1\", \"D MG 01 \") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar')", "'07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False,", "select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown() if", "def setUpClass(inst): inst.oHelper = Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D MG", "self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='',", "@classmethod def setUpClass(inst): inst.oHelper = 
Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D", "self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst):", "self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)'''", "self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown() if __name__ ==", "test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020')", "GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper = Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\",", "unittest class GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper = Webapp() inst.oHelper.Setup(\"SIGAGTP\",", "class GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper = Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\",", "import Webapp import unittest class GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper", "self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048',", "'02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\",", "tir import 
Webapp import unittest class GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst):", "setUpClass(inst): inst.oHelper = Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D MG 01", "<reponame>98llm/tir-script-samples from tir import Webapp import unittest class GTPA036E(unittest.TestCase): @classmethod", "= Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D MG 01 \") inst.oHelper.Program('GTPA036')", "inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D MG 01 \") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self):", "inst.oHelper = Webapp() inst.oHelper.Setup(\"SIGAGTP\", \"05/08/2020\", \"T1\", \"D MG 01 \")", "\") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE',", "self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência',", "\"D MG 01 \") inst.oHelper.Program('GTPA036') def test_GTPA036E_CT001(self): self.oHelper.SetButton('Avançar') self.oHelper.ClickLabel(\"Arquivo não", "não formatado\") self.oHelper.SetButton('Avançar') self.oHelper.SetValue('XXX_DATADE', '02/08/2020') self.oHelper.SetValue('XXX_DATATE', '07/08/2020') self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1)", "self.oHelper.ScrollGrid(column='Agência', match_value='000048', grid_number=1) '''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1)", "'''self.oHelper.ClickGridCell(\"\", row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') 
self.oHelper.AssertTrue()", "self.oHelper.AssertTrue() @classmethod def tearDownClass(inst): inst.oHelper.TearDown() if __name__ == '__main__': unittest.main()", "row=2, grid_number=1)''' self.oHelper.ClickBox(\"\", contents_list='', select_all=False, grid_number=1) self.oHelper.SetButton('Concluir') self.oHelper.SetButton('Fechar') self.oHelper.AssertTrue() @classmethod", "Webapp import unittest class GTPA036E(unittest.TestCase): @classmethod def setUpClass(inst): inst.oHelper =" ]
[ "DjangoFilterBackend from django.http import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import JSONWebTokenAuthentication from", "from rest_framework.generics import ListAPIView from rest_framework.filters import SearchFilter, OrderingFilter from", "HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.permissions import IsAuthenticated from", "rest_framework.permissions import IsAuthenticated from rest_xops.basic import XopsResponse from rest_xops.code import", "rest_framework.viewsets import ModelViewSet from rest_framework.generics import ListAPIView from rest_framework.filters import", "from rest_xops.code import * from django.db.models import Q from django.apps", "{model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all() serializer_class = {model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend,", "from ..models import {model_camel_case_name} from django.contrib.contenttypes.models import ContentType from ..serializers.{model_name}_serializers", ": code_generator from rest_framework.viewsets import ModelViewSet from rest_framework.generics import ListAPIView", "import ContentType from ..serializers.{model_name}_serializers import * class {model_camel_case_name}View(ModelViewSet): queryset =", "from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.response import Response from", "from django.apps import apps from ..models import {model_camel_case_name} from django.contrib.contenttypes.models", "import Q from django.apps import apps from ..models import {model_camel_case_name}", "class {model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all() serializer_class = {model_camel_case_name}Serializer filter_backends =", "authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) filter_fields = 
({filter_fields}) search_fields", "django.db.models import Q from django.apps import apps from ..models import", "# @Time : {time} # @Author : code_generator from rest_framework.viewsets", "code_generator from rest_framework.viewsets import ModelViewSet from rest_framework.generics import ListAPIView from", "..serializers.{model_name}_serializers import * class {model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all() serializer_class =", "ContentType from ..serializers.{model_name}_serializers import * class {model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all()", "api_view,authentication_classes,permission_classes,action from common.custom import CommonPagination, RbacPermission from django_filters.rest_framework import DjangoFilterBackend", "import {model_camel_case_name} from django.contrib.contenttypes.models import ContentType from ..serializers.{model_name}_serializers import *", "from rest_framework.decorators import api_view,authentication_classes,permission_classes,action from common.custom import CommonPagination, RbacPermission from", "django.http import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.permissions import", "from django_filters.rest_framework import DjangoFilterBackend from django.http import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication", "# @Author : code_generator from rest_framework.viewsets import ModelViewSet from rest_framework.generics", "rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.permissions import IsAuthenticated from rest_xops.basic import", "import * class {model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all() serializer_class = {model_camel_case_name}Serializer", "rest_framework.generics import ListAPIView from rest_framework.filters import SearchFilter, 
OrderingFilter from rest_framework.response", "XopsResponse from rest_xops.code import * from django.db.models import Q from", "from django.contrib.contenttypes.models import ContentType from ..serializers.{model_name}_serializers import * class {model_camel_case_name}View(ModelViewSet):", "from django.db.models import Q from django.apps import apps from ..models", "= {model_camel_case_name}.objects.all() serializer_class = {model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class", "rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.response import Response from rest_framework.decorators", "from rest_xops.basic import XopsResponse from rest_xops.code import * from django.db.models", "{model_camel_case_name} from django.contrib.contenttypes.models import ContentType from ..serializers.{model_name}_serializers import * class", "= CommonPagination ordering_fields = ('id',) authentication_classes = (JSONWebTokenAuthentication,) permission_classes =", "import IsAuthenticated from rest_xops.basic import XopsResponse from rest_xops.code import *", "queryset = {model_camel_case_name}.objects.all() serializer_class = {model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter)", "import JSONWebTokenAuthentication from rest_framework.permissions import IsAuthenticated from rest_xops.basic import XopsResponse", "django.contrib.contenttypes.models import ContentType from ..serializers.{model_name}_serializers import * class {model_camel_case_name}View(ModelViewSet): queryset", "common.custom import CommonPagination, RbacPermission from django_filters.rest_framework import DjangoFilterBackend from django.http", "@Time : {time} # @Author : code_generator from rest_framework.viewsets import", "RbacPermission from django_filters.rest_framework import DjangoFilterBackend from django.http import HttpResponse,FileResponse,JsonResponse from", 
"<filename>code_tmpl/views.py<gh_stars>0 # @Time : {time} # @Author : code_generator from", "import * from django.db.models import Q from django.apps import apps", "from common.custom import CommonPagination, RbacPermission from django_filters.rest_framework import DjangoFilterBackend from", "from django.http import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.permissions", "= (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) filter_fields = ({filter_fields}) search_fields =", "import apps from ..models import {model_camel_case_name} from django.contrib.contenttypes.models import ContentType", "* from django.db.models import Q from django.apps import apps from", "apps from ..models import {model_camel_case_name} from django.contrib.contenttypes.models import ContentType from", "* class {model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all() serializer_class = {model_camel_case_name}Serializer filter_backends", ": {time} # @Author : code_generator from rest_framework.viewsets import ModelViewSet", "import ListAPIView from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.response import", "SearchFilter, OrderingFilter from rest_framework.response import Response from rest_framework.decorators import api_view,authentication_classes,permission_classes,action", "('id',) authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) filter_fields = ({filter_fields})", "django_filters.rest_framework import DjangoFilterBackend from django.http import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import", "{time} # @Author : code_generator from rest_framework.viewsets import ModelViewSet from", "rest_xops.code import * from django.db.models import Q from django.apps import", "{model_camel_case_name}.objects.all() serializer_class = 
{model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class =", "from rest_framework.permissions import IsAuthenticated from rest_xops.basic import XopsResponse from rest_xops.code", "= {model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class = CommonPagination ordering_fields", "(DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class = CommonPagination ordering_fields = ('id',) authentication_classes =", "Q from django.apps import apps from ..models import {model_camel_case_name} from", "SearchFilter,OrderingFilter) pagination_class = CommonPagination ordering_fields = ('id',) authentication_classes = (JSONWebTokenAuthentication,)", "import ModelViewSet from rest_framework.generics import ListAPIView from rest_framework.filters import SearchFilter,", "ListAPIView from rest_framework.filters import SearchFilter, OrderingFilter from rest_framework.response import Response", "import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.permissions import IsAuthenticated", "django.apps import apps from ..models import {model_camel_case_name} from django.contrib.contenttypes.models import", "from rest_framework_jwt.authentication import JSONWebTokenAuthentication from rest_framework.permissions import IsAuthenticated from rest_xops.basic", "JSONWebTokenAuthentication from rest_framework.permissions import IsAuthenticated from rest_xops.basic import XopsResponse from", "ModelViewSet from rest_framework.generics import ListAPIView from rest_framework.filters import SearchFilter, OrderingFilter", "(JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) filter_fields = ({filter_fields}) search_fields = ({search_fields})", "@Author : code_generator from rest_framework.viewsets import ModelViewSet from rest_framework.generics import", "import 
SearchFilter, OrderingFilter from rest_framework.response import Response from rest_framework.decorators import", "IsAuthenticated from rest_xops.basic import XopsResponse from rest_xops.code import * from", "Response from rest_framework.decorators import api_view,authentication_classes,permission_classes,action from common.custom import CommonPagination, RbacPermission", "from rest_framework.response import Response from rest_framework.decorators import api_view,authentication_classes,permission_classes,action from common.custom", "{model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class = CommonPagination ordering_fields =", "rest_framework.response import Response from rest_framework.decorators import api_view,authentication_classes,permission_classes,action from common.custom import", "filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class = CommonPagination ordering_fields = ('id',)", "rest_xops.basic import XopsResponse from rest_xops.code import * from django.db.models import", "OrderingFilter from rest_framework.response import Response from rest_framework.decorators import api_view,authentication_classes,permission_classes,action from", "import Response from rest_framework.decorators import api_view,authentication_classes,permission_classes,action from common.custom import CommonPagination,", "= ('id',) authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) filter_fields =", "import api_view,authentication_classes,permission_classes,action from common.custom import CommonPagination, RbacPermission from django_filters.rest_framework import", "import XopsResponse from rest_xops.code import * from django.db.models import Q", "ordering_fields = ('id',) authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,) filter_fields", "..models import {model_camel_case_name} from 
django.contrib.contenttypes.models import ContentType from ..serializers.{model_name}_serializers import", "CommonPagination ordering_fields = ('id',) authentication_classes = (JSONWebTokenAuthentication,) permission_classes = (IsAuthenticated,)", "rest_framework.decorators import api_view,authentication_classes,permission_classes,action from common.custom import CommonPagination, RbacPermission from django_filters.rest_framework", "from ..serializers.{model_name}_serializers import * class {model_camel_case_name}View(ModelViewSet): queryset = {model_camel_case_name}.objects.all() serializer_class", "import CommonPagination, RbacPermission from django_filters.rest_framework import DjangoFilterBackend from django.http import", "= (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class = CommonPagination ordering_fields = ('id',) authentication_classes", "pagination_class = CommonPagination ordering_fields = ('id',) authentication_classes = (JSONWebTokenAuthentication,) permission_classes", "CommonPagination, RbacPermission from django_filters.rest_framework import DjangoFilterBackend from django.http import HttpResponse,FileResponse,JsonResponse", "from rest_framework.viewsets import ModelViewSet from rest_framework.generics import ListAPIView from rest_framework.filters", "import DjangoFilterBackend from django.http import HttpResponse,FileResponse,JsonResponse from rest_framework_jwt.authentication import JSONWebTokenAuthentication", "serializer_class = {model_camel_case_name}Serializer filter_backends = (DjangoFilterBackend, SearchFilter,OrderingFilter) pagination_class = CommonPagination" ]
[ "def __init__(self, *args, **kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname = self.get_hostname()", "self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout)", "None #### Usage Add the collector config as : enabled", "collector disabled') self.enabled = False return try: self.version = self._get_version()", "import load_config as load_server_config try: import netuitive except ImportError: netuitive", "as : - metrics.heartbeat Netuitive Change History ======================== DVG 2016/11/14", "Exception as e: self.log.debug(e) def collect(self): check = netuitive.Check('heartbeat', self.hostname,", "the collector config as : enabled = True path =", "enabled = True path = netuitive Metrics are collected as", "invoked. #### Dependencies None #### Usage Add the collector config", "agent=self.version, connection_timeout=self.connection_timeout) except Exception as e: self.log.debug(e) def collect(self): check", "\"\"\" import diamond.collector from diamond.utils.config import load_config as load_server_config try:", "try: import netuitive except ImportError: netuitive = None class HeartbeatCollector(diamond.collector.Collector):", "False return try: self.version = self._get_version() if 'netuitive_connection_timeout' in self.config:", "if not netuitive: self.log.error('netuitive import failed. Heartbeat collector disabled') self.enabled", "except ImportError: netuitive = None class HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args,", "*args, **kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname = self.get_hostname() self.ttl =", "heartbeat every time this collector is invoked. 
#### Dependencies None", "class HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname", "as : enabled = True path = netuitive Metrics are", "self.config['ttl'] self.connection_timeout = 5 if not netuitive: self.log.error('netuitive import failed.", "Initial version. \"\"\" import diamond.collector from diamond.utils.config import load_config as", "= None class HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(HeartbeatCollector, self).__init__(*args,", "api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except Exception as e: self.log.debug(e) def collect(self):", "int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except Exception as", "\"\"\" Send a value of 1 as a heartbeat every", "this collector is invoked. #### Dependencies None #### Usage Add", "2016/11/14 Initial version. \"\"\" import diamond.collector from diamond.utils.config import load_config", "is invoked. #### Dependencies None #### Usage Add the collector", "as e: self.log.debug(e) def collect(self): check = netuitive.Check('heartbeat', self.hostname, self.ttl)", "= 5 if not netuitive: self.log.error('netuitive import failed. 
Heartbeat collector", "self.version = self._get_version() if 'netuitive_connection_timeout' in self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout'])", "netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except Exception as e: self.log.debug(e) def", "import diamond.collector from diamond.utils.config import load_config as load_server_config try: import", "= self._get_version() if 'netuitive_connection_timeout' in self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api", "failed. Heartbeat collector disabled') self.enabled = False return try: self.version", "None class HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs)", "of 1 as a heartbeat every time this collector is", "value of 1 as a heartbeat every time this collector", "Metrics are collected as : - metrics.heartbeat Netuitive Change History", "a value of 1 as a heartbeat every time this", "- metrics.heartbeat Netuitive Change History ======================== DVG 2016/11/14 Initial version.", "= self.get_hostname() self.ttl = self.config['ttl'] self.connection_timeout = 5 if not", "**kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname = self.get_hostname() self.ttl = self.config['ttl']", "load_server_config try: import netuitive except ImportError: netuitive = None class", "__init__(self, *args, **kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname = self.get_hostname() self.ttl", "5 if not netuitive: self.log.error('netuitive import failed. Heartbeat collector disabled')", "a heartbeat every time this collector is invoked. 
#### Dependencies", "# coding=utf-8 \"\"\" Send a value of 1 as a", "= False return try: self.version = self._get_version() if 'netuitive_connection_timeout' in", ": enabled = True path = netuitive Metrics are collected", "self.enabled = False return try: self.version = self._get_version() if 'netuitive_connection_timeout'", "netuitive except ImportError: netuitive = None class HeartbeatCollector(diamond.collector.Collector): def __init__(self,", "metrics.heartbeat Netuitive Change History ======================== DVG 2016/11/14 Initial version. \"\"\"", "collector config as : enabled = True path = netuitive", "config as : enabled = True path = netuitive Metrics", "self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except Exception as e:", "self).__init__(*args, **kwargs) self.hostname = self.get_hostname() self.ttl = self.config['ttl'] self.connection_timeout =", "Netuitive Change History ======================== DVG 2016/11/14 Initial version. \"\"\" import", "= int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except Exception", "collector is invoked. #### Dependencies None #### Usage Add the", "self.connection_timeout = 5 if not netuitive: self.log.error('netuitive import failed. 
Heartbeat", "netuitive = None class HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(HeartbeatCollector,", "coding=utf-8 \"\"\" Send a value of 1 as a heartbeat", "self._get_version() if 'netuitive_connection_timeout' in self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api =", "= netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except Exception as e: self.log.debug(e)", "diamond.utils.config import load_config as load_server_config try: import netuitive except ImportError:", "every time this collector is invoked. #### Dependencies None ####", "path = netuitive Metrics are collected as : - metrics.heartbeat", "Send a value of 1 as a heartbeat every time", "'netuitive_connection_timeout' in self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'],", "1 as a heartbeat every time this collector is invoked.", "Add the collector config as : enabled = True path", "disabled') self.enabled = False return try: self.version = self._get_version() if", "DVG 2016/11/14 Initial version. 
\"\"\" import diamond.collector from diamond.utils.config import", "except Exception as e: self.log.debug(e) def collect(self): check = netuitive.Check('heartbeat',", "diamond.collector from diamond.utils.config import load_config as load_server_config try: import netuitive", "#### Dependencies None #### Usage Add the collector config as", "= netuitive Metrics are collected as : - metrics.heartbeat Netuitive", "ImportError: netuitive = None class HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs):", "e: self.log.debug(e) def collect(self): check = netuitive.Check('heartbeat', self.hostname, self.ttl) self.api.post_check(check)", "Dependencies None #### Usage Add the collector config as :", "self.get_hostname() self.ttl = self.config['ttl'] self.connection_timeout = 5 if not netuitive:", "import failed. Heartbeat collector disabled') self.enabled = False return try:", "return try: self.version = self._get_version() if 'netuitive_connection_timeout' in self.config: self.connection_timeout", "HeartbeatCollector(diamond.collector.Collector): def __init__(self, *args, **kwargs): super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname =", "try: self.version = self._get_version() if 'netuitive_connection_timeout' in self.config: self.connection_timeout =", "Heartbeat collector disabled') self.enabled = False return try: self.version =", "History ======================== DVG 2016/11/14 Initial version. \"\"\" import diamond.collector from", "self.hostname = self.get_hostname() self.ttl = self.config['ttl'] self.connection_timeout = 5 if", "not netuitive: self.log.error('netuitive import failed. Heartbeat collector disabled') self.enabled =", "True path = netuitive Metrics are collected as : -", "Change History ======================== DVG 2016/11/14 Initial version. 
\"\"\" import diamond.collector", "= self.config['ttl'] self.connection_timeout = 5 if not netuitive: self.log.error('netuitive import", "import netuitive except ImportError: netuitive = None class HeartbeatCollector(diamond.collector.Collector): def", "load_config as load_server_config try: import netuitive except ImportError: netuitive =", "time this collector is invoked. #### Dependencies None #### Usage", "version. \"\"\" import diamond.collector from diamond.utils.config import load_config as load_server_config", "#### Usage Add the collector config as : enabled =", "netuitive: self.log.error('netuitive import failed. Heartbeat collector disabled') self.enabled = False", "super(HeartbeatCollector, self).__init__(*args, **kwargs) self.hostname = self.get_hostname() self.ttl = self.config['ttl'] self.connection_timeout", "in self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version,", "as load_server_config try: import netuitive except ImportError: netuitive = None", ": - metrics.heartbeat Netuitive Change History ======================== DVG 2016/11/14 Initial", "if 'netuitive_connection_timeout' in self.config: self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'],", "= True path = netuitive Metrics are collected as :", "netuitive Metrics are collected as : - metrics.heartbeat Netuitive Change", "collected as : - metrics.heartbeat Netuitive Change History ======================== DVG", "self.ttl = self.config['ttl'] self.connection_timeout = 5 if not netuitive: self.log.error('netuitive", "self.log.error('netuitive import failed. Heartbeat collector disabled') self.enabled = False return", "as a heartbeat every time this collector is invoked. 
####", "Usage Add the collector config as : enabled = True", "connection_timeout=self.connection_timeout) except Exception as e: self.log.debug(e) def collect(self): check =", "are collected as : - metrics.heartbeat Netuitive Change History ========================", "self.connection_timeout = int(self.config['netuitive_connection_timeout']) self.api = netuitive.Client(url=self.config['netuitive_url'], api_key=self.config['netuitive_api_key'], agent=self.version, connection_timeout=self.connection_timeout) except", "======================== DVG 2016/11/14 Initial version. \"\"\" import diamond.collector from diamond.utils.config", "<reponame>art19/netuitive-diamond<filename>src/collectors/heartbeat/heartbeat.py # coding=utf-8 \"\"\" Send a value of 1 as", "**kwargs) self.hostname = self.get_hostname() self.ttl = self.config['ttl'] self.connection_timeout = 5", "from diamond.utils.config import load_config as load_server_config try: import netuitive except" ]
[ "[exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index = [\"Best\", \"Median\", \"Std.\"] # find", "# print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1]) # TODO: check error print(\"{}\\n----------------------------------------------\".format(market))", "and second best algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing = list(meds1_Spacing.index[:2])", "stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2])", "print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) # TODO: check error # read Delta", "\"Median\", \"Std.\"] stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index =", "best algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread =", "stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays = [[\"GD\", \"GD\", \"GD\", \"Spacing\", \"Spacing\",", "# read MaxSpread data file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split", "= list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0],", "# TODO: check error # read Delta data file dat", "= dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]] # calculate statistics stat1_IGD =", "\"Std.\"] # find best and second best algorithm meds1_IGD =", "list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) # TODO:", "# split into two experiments exp1_IGD = dat[dat.columns[:5]] exp2_IGD =", "stat2_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] # find best and second", 
"print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) # TODO: check", "= dat[dat.columns[5:]] # calculate statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()])", "best and second best algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing =", "# TODO: check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format stat1_overall =", "check error # read MaxSpread data file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market))", "read Delta data file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into", "read MaxSpread data file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into", "read Hypervolume data file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into", "stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0],", "Delta data file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into two", "check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format stat1_overall = pd.concat( [stat1_GD,", "print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) # TODO: check", "= pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index = [\"Best\", \"Median\", \"Std.\"]", "best2_Hypervolume[1]) # TODO: check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format stat1_overall", "stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) 
best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2])", "stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index = [\"Best\", \"Median\", \"Std.\"]", "calculate statistics stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index =", "\"Std.\"] stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\",", "split into two experiments exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]]", "\"nikkei\"] market = markets[int(sys.argv[1])-1] # read GD data file dat", "IGD data file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into two", "\"Median\", \"Std.\"] stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index = [\"Best\",", "list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1])", "= list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) #", "dat[dat.columns[5:]] # calculate statistics stat1_Delta = pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()])", "= [\"Best\", \"Median\", \"Std.\"] stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index", "calculate statistics stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index = [\"Best\",", "read GD data file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into", "\"Hypervolume\"], stat1_overall.index ] index = pd.MultiIndex.from_arrays(arrays, 
names=[\"Metric\", \"\"]) stat1_overall.index =", "best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) # TODO: check error", "meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) #", "best2_Hypervolume[0], best2_Hypervolume[1]) # TODO: check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format", "second best algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread", "[\"Best\", \"Median\", \"Std.\"] # find best and second best algorithm", "# calculate statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index =", "= pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"]", "# find best and second best algorithm meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values()", "best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market),", "\"Median\", \"Std.\"] # find best and second best algorithm meds1_Hypervolume", "best and second best algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta =", "split into two experiments exp1_Delta = dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]]", "best2_GD[1]) # TODO: check error # read Spacing data file", "pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\", \"Median\", \"Std.\"] #", "= stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume = 
list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False)", "statistics stat1_Delta = pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index = [\"Best\",", "\"Median\", \"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index = [\"Best\",", "data file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into two experiments", "= pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index = [\"Best\", \"Median\", \"Std.\"]", "TODO: check error # read Hypervolume data file dat =", "stat1_IGD, stat1_Hypervolume]) stat2_overall = pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD,", "= list(meds1_Delta.index[:2]) meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0],", "Hypervolume data file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into two", "check error # read IGD data file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market))", "meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1])", "stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index = [\"Best\", \"Median\", \"Std.\"]", "dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]] # calculate statistics stat1_Spacing = pd.DataFrame(", "find best and second best algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta", "= pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"]) stat1_overall.index = index stat2_overall.index = index", "second best algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing = 
list(meds1_Spacing.index[:2]) meds2_Spacing", "error # read MaxSpread data file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) #", "= pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index = [\"Best\", \"Median\", \"Std.\"] stat2_IGD", "best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) # TODO: check error #", "[\"Best\", \"Median\", \"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index =", "stat1_overall.index ] index = pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"]) stat1_overall.index = index", "names=[\"Metric\", \"\"]) stat1_overall.index = index stat2_overall.index = index print(stat1_overall) print(\"----------------------------------------------\")", "dat[dat.columns[:5]] exp2_Hypervolume = dat[dat.columns[5:]] # calculate statistics stat1_Hypervolume = pd.DataFrame(", "# print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) # TODO: check error # read", "\"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\", \"Delta\", \"IGD\", \"IGD\", \"IGD\",", "\"Std.\"] # find best and second best algorithm meds1_Spacing =", "\"Delta\", \"Delta\", \"IGD\", \"IGD\", \"IGD\", \"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index ]", "print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) # TODO: check", "= stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market),", "exp1_Delta = dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]] # calculate statistics stat1_Delta", "= pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index = [\"Best\", \"Median\", \"Std.\"]", "import pandas as pd import sys markets = [\"hangseng\", \"dax\",", "# calculate statistics 
stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index", "into two experiments exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]] #", "data file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into two experiments", "and second best algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2])", "\"Std.\"] stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index = [\"Best\", \"Median\",", "meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values() best2_IGD", "two experiments exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]] # calculate", "stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays = [[\"GD\", \"GD\", \"GD\", \"Spacing\",", "exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index = [\"Best\", \"Median\", \"Std.\"] # find best", "= dat[dat.columns[5:]] # calculate statistics stat1_Delta = pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(),", "split into two experiments exp1_Hypervolume = dat[dat.columns[:5]] exp2_Hypervolume = dat[dat.columns[5:]]", "best2_MaxSpread = list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1])", "algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2]) meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values()", "[\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index", "pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index = [\"Best\", \"Median\", \"Std.\"] # find", "dat[dat.columns[5:]] # calculate statistics 
stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()])", "check error # read Spacing data file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market))", "best algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2]) meds2_GD =", "statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index = [\"Best\", \"Median\",", "# split into two experiments exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing =", "best and second best algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread =", "= stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market),", "dat[dat.columns[5:]] # calculate statistics stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()])", "Spacing data file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into two", "= stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2]) meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta =", "into two experiments exp1_Delta = dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]] #", "# split into two experiments exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread =", "\"Median\", \"Std.\"] # find best and second best algorithm meds1_IGD", "experiments exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]] # calculate statistics", "[exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] stat2_Hypervolume =", "find best and second best algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread", "second best algorithm meds1_IGD = 
stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD", "statistics stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index = [\"Best\", \"Median\",", "arrays = [[\"GD\", \"GD\", \"GD\", \"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\",", "list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1]) #", "best2_IGD[0], best2_IGD[1]) # TODO: check error # read Hypervolume data", "best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1])", "MaxSpread data file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into two", "list(meds1_Delta.index[:2]) meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1])", "exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index = [\"Best\", \"Median\", \"Std.\"] stat2_Spacing = pd.DataFrame(", "pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] stat2_Hypervolume", "pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index = [\"Best\", \"Median\", \"Std.\"] stat2_GD =", "\"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index = [\"Best\", \"Median\",", "= dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]] # calculate statistics stat1_MaxSpread =", "[\"hangseng\", \"dax\", \"ftse\", \"sp\", \"nikkei\"] market = markets[int(sys.argv[1])-1] # read", "find best and second best algorithm meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values() best1_IGD", "'{:.2e}'.format stat1_overall = pd.concat( [stat1_GD, 
stat1_Spacing, stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume])", "= pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into two experiments exp1_Spacing = dat[dat.columns[:5]]", "pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"]) stat1_overall.index = index stat2_overall.index = index print(stat1_overall)", "\"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index ] index = pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"])", "stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0],", "stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0],", "stat2_Delta = pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index = [\"Best\", \"Median\",", "meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing = list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing", "best2_IGD[1]) # TODO: check error # read Hypervolume data file", "market = markets[int(sys.argv[1])-1] # read GD data file dat =", "pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] #", "calculate statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index = [\"Best\",", "# read Spacing data file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split", "list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1])", "= [\"Best\", \"Median\", 
\"Std.\"] stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()])", "= list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market),", "dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into two experiments exp1_MaxSpread =", "dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]] # calculate statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(),", "list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1])", "\"Std.\"] # find best and second best algorithm meds1_Delta =", "and second best algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2])", "= dat[dat.columns[:5]] exp2_Hypervolume = dat[dat.columns[5:]] # calculate statistics stat1_Hypervolume =", "best algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume = list(meds1_Hypervolume.index[:2]) meds2_Hypervolume", "# find best and second best algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values()", "\"Median\", \"Std.\"] # find best and second best algorithm meds1_MaxSpread", "[exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread =", "print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) # TODO: check", "algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = 
stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False)", "exp2_Spacing = dat[dat.columns[5:]] # calculate statistics stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(),", "= list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) #", "exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] # find best", "best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1]) # TODO: check error", "and second best algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread = list(meds1_MaxSpread.index[:2])", "# calculate statistics stat1_Delta = pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index", "exp1_GD.std()]) stat1_GD.index = [\"Best\", \"Median\", \"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(),", "exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\", \"Median\", \"Std.\"] # find best", "dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]] # calculate statistics stat1_Delta = pd.DataFrame(", "pandas as pd import sys markets = [\"hangseng\", \"dax\", \"ftse\",", "list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0],", "exp1_Hypervolume = dat[dat.columns[:5]] exp2_Hypervolume = dat[dat.columns[5:]] # calculate statistics stat1_Hypervolume", "two experiments exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]] # calculate", "calculate statistics stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index =", "algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() 
best1_Spacing = list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values()", "= dat[dat.columns[5:]] # calculate statistics stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()])", "exp1_GD = dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]] # calculate statistics stat1_GD", "best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) # TODO: check error #", "exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread = pd.DataFrame(", "meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2]) meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta", "pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into two experiments exp1_GD = dat[dat.columns[:5]] exp2_GD", "# find best and second best algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values()", "# find best and second best algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values()", "check error # read Delta data file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market))", "exp2_GD.std()]) stat2_GD.index = [\"Best\", \"Median\", \"Std.\"] # find best and", "error # read IGD data file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) #", "index = pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"]) stat1_overall.index = index stat2_overall.index =", "exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index = [\"Best\", \"Median\", \"Std.\"] stat2_Delta = pd.DataFrame(", "TODO: check error # read Spacing data file dat =", "meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) #", "meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD", "as pd import sys markets = [\"hangseng\", \"dax\", \"ftse\", \"sp\",", "\"ftse\", 
\"sp\", \"nikkei\"] market = markets[int(sys.argv[1])-1] # read GD data", "stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall = pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta,", "\"sp\", \"nikkei\"] market = markets[int(sys.argv[1])-1] # read GD data file", "# print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) # TODO: check error # read", "best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) # TODO: check error", "ascending=False) best1_Hypervolume = list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume =", "# read GD data file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split", "stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2])", "data file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into two experiments", "import numpy as np import pandas as pd import sys", "exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] stat2_Hypervolume = pd.DataFrame(", "dat[dat.columns[5:]] # calculate statistics stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()])", "\"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\", \"Delta\", \"IGD\",", "= list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) #", "= stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing = list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing =", "= markets[int(sys.argv[1])-1] # read GD data file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market))", "= 
list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1])", "data file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into two experiments", "exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(),", "calculate statistics stat1_Delta = pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index =", "into two experiments exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]] #", "TODO: check error # read MaxSpread data file dat =", "[[\"GD\", \"GD\", \"GD\", \"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\",", "pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index = [\"Best\", \"Median\", \"Std.\"] stat2_IGD =", "= [\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()])", "stat2_Delta.index = [\"Best\", \"Median\", \"Std.\"] # find best and second", "stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays = [[\"GD\", \"GD\", \"GD\",", "pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] #", "# calculate statistics stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index", "algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume = list(meds1_Hypervolume.index[:2]) meds2_Hypervolume =", "[exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] # find", "two experiments exp1_Hypervolume = dat[dat.columns[:5]] 
exp2_Hypervolume = dat[dat.columns[5:]] # calculate", "stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\", \"Median\",", "pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into two experiments exp1_IGD = dat[dat.columns[:5]] exp2_IGD", "\"Std.\"] stat2_Delta = pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index = [\"Best\",", "= pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into two experiments exp1_GD = dat[dat.columns[:5]]", "stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\", \"Median\",", "pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into two experiments exp1_Hypervolume = dat[dat.columns[:5]] exp2_Hypervolume", "data file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into two experiments", "dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into two experiments exp1_GD =", "calculate statistics stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index =", "stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\", \"Median\",", "[\"Best\", \"Median\", \"Std.\"] stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index", "error # read Hypervolume data file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) #", "best1_Hypervolume = list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2])", "error # read Delta data file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) #", "meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() 
best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) #", "= [\"Best\", \"Median\", \"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index", "\"Median\", \"Std.\"] # find best and second best algorithm meds1_Delta", "stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing = list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing = list(meds2_Spacing.index[:2])", "best2_Delta[0], best2_Delta[1]) # TODO: check error # read IGD data", "best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) # TODO: check error", "# find best and second best algorithm meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False)", "TODO: check error # read Delta data file dat =", "\"IGD\", \"IGD\", \"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index ] index = pd.MultiIndex.from_arrays(arrays,", "two experiments exp1_GD = dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]] # calculate", "GD data file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into two", "\"Std.\"] stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\",", "exp2_Delta = dat[dat.columns[5:]] # calculate statistics stat1_Delta = pd.DataFrame( [exp1_Delta.min(),", "# TODO: check error # read Hypervolume data file dat", "exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] # find best and", "best algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing = list(meds1_Spacing.index[:2]) meds2_Spacing =", "[exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\", \"Median\", \"Std.\"] # find", "exp2_IGD.std()]) stat2_IGD.index = [\"Best\", \"Median\", \"Std.\"] # find best and", "\"Std.\"] # find best and second best 
algorithm meds1_Hypervolume =", "= stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD =", "and second best algorithm meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2])", "# TODO: check error # read IGD data file dat", "split into two experiments exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]]", "= [\"Best\", \"Median\", \"Std.\"] # find best and second best", "\"Median\", \"Std.\"] # find best and second best algorithm meds1_Spacing", "stat2_GD.index = [\"Best\", \"Median\", \"Std.\"] # find best and second", "exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\", \"Median\", \"Std.\"] # find best and", "stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\", \"Median\",", "<reponame>vitorebatista/AVEMH import numpy as np import pandas as pd import", "sys markets = [\"hangseng\", \"dax\", \"ftse\", \"sp\", \"nikkei\"] market =", "best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1])", "print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) # TODO: check error # read MaxSpread", "experiments exp1_Delta = dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]] # calculate statistics", "second best algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume = list(meds1_Hypervolume.index[:2])", "into two experiments exp1_IGD = dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]] #", "stat1_overall = pd.concat( [stat1_GD, stat1_Spacing, stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall", "best algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2]) meds2_Delta =", "exp2_Hypervolume = 
dat[dat.columns[5:]] # calculate statistics stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(),", "best2_Delta[1]) # TODO: check error # read IGD data file", "stat1_Spacing, stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall = pd.concat( [stat2_GD, stat2_Spacing,", "two experiments exp1_IGD = dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]] # calculate", "exp2_GD = dat[dat.columns[5:]] # calculate statistics stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(),", "pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into two experiments exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing", "exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]] # calculate statistics stat1_Spacing", "print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format stat1_overall = pd.concat( [stat1_GD, stat1_Spacing, stat1_MaxSpread,", "print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1]) # TODO: check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format", "stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index = [\"Best\", \"Median\", \"Std.\"]", "data file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into two experiments", "best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market),", "best and second best algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume", "= stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) #", "exp1_IGD.std()]) stat1_IGD.index = [\"Best\", \"Median\", \"Std.\"] 
stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(),", "exp2_GD.median(), exp2_GD.std()]) stat2_GD.index = [\"Best\", \"Median\", \"Std.\"] # find best", "meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume = list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values(", "= dat[dat.columns[5:]] # calculate statistics stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(),", "file dat = pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into two experiments exp1_MaxSpread", "\"Median\", \"Std.\"] # find best and second best algorithm meds1_GD", "print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) # TODO: check", "[\"Best\", \"Median\", \"Std.\"] stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index =", "= pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index = [\"Best\", \"Median\", \"Std.\"] stat2_GD", "best2_GD[0], best2_GD[1]) # TODO: check error # read Spacing data", "# read Hypervolume data file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split", "# split into two experiments exp1_GD = dat[dat.columns[:5]] exp2_GD =", "= [[\"GD\", \"GD\", \"GD\", \"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\",", "pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into two experiments exp1_Delta = dat[dat.columns[:5]] exp2_Delta", "exp2_IGD = dat[dat.columns[5:]] # calculate statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(),", "statistics stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\",", "stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index = 
[\"Best\", \"Median\",", "best2_Spacing[1]) # TODO: check error # read MaxSpread data file", "pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread", "exp1_IGD = dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]] # calculate statistics stat1_IGD", "[exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index = [\"Best\", \"Median\", \"Std.\"] stat2_Spacing =", "= list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0],", "dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]] # calculate statistics stat1_MaxSpread = pd.DataFrame(", "stat2_IGD.index = [\"Best\", \"Median\", \"Std.\"] # find best and second", "TODO: check error # read IGD data file dat =", "= pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"]", "= dat[dat.columns[5:]] # calculate statistics stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(),", "= dat[dat.columns[5:]] # calculate statistics stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(),", "\"Std.\"] # find best and second best algorithm meds1_MaxSpread =", "stat1_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(),", "exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] # find best", "best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) # TODO: check error #", "= list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2]) 
print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0],", "read Spacing data file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into", "stat2_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] # find best and second", "list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) # TODO:", "= [\"Best\", \"Median\", \"Std.\"] stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()])", "find best and second best algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False)", "\"Hypervolume\", \"Hypervolume\"], stat1_overall.index ] index = pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"]) stat1_overall.index", "statistics stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\",", "= dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]] # calculate statistics stat1_Delta =", "experiments exp1_Spacing = dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]] # calculate statistics", "read IGD data file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into", "stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2]) meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2])", "find best and second best algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD", "\"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\", \"Delta\", \"IGD\", \"IGD\", \"IGD\", \"Hypervolume\", \"Hypervolume\",", "stat2_overall = pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays", "best1_Spacing = list(meds1_Spacing.index[:2]) meds2_Spacing = stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market),", 
"\"\"]) stat1_overall.index = index stat2_overall.index = index print(stat1_overall) print(\"----------------------------------------------\") print(stat2_overall)", "best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) # TODO: check error", "stat1_Delta.index = [\"Best\", \"Median\", \"Std.\"] stat2_Delta = pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(),", "dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into two experiments exp1_Hypervolume =", "experiments exp1_IGD = dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]] # calculate statistics", "stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index = [\"Best\", \"Median\", \"Std.\"]", "second best algorithm meds1_Delta = stat1_Delta.loc[\"Median\"].sort_values() best1_Delta = list(meds1_Delta.index[:2]) meds2_Delta", "print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) # TODO: check error # read Hypervolume", "= dat[dat.columns[:5]] exp2_Spacing = dat[dat.columns[5:]] # calculate statistics stat1_Spacing =", "dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into two experiments exp1_IGD =", "best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) # TODO: check error #", "= list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0],", "algorithm meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values()", "[\"Best\", \"Median\", \"Std.\"] stat2_Delta = pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index", "\"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\", \"Delta\", \"IGD\", \"IGD\",", "= '{:.2e}'.format stat1_overall = pd.concat( [stat1_GD, stat1_Spacing, 
stat1_MaxSpread, stat1_Delta, stat1_IGD,", "check error # read Hypervolume data file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market))", "[exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index = [\"Best\", \"Median\", \"Std.\"] stat2_Delta =", "\"dax\", \"ftse\", \"sp\", \"nikkei\"] market = markets[int(sys.argv[1])-1] # read GD", "= pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into two experiments exp1_MaxSpread = dat[dat.columns[:5]]", "stat2_IGD, stat2_Hypervolume]) arrays = [[\"GD\", \"GD\", \"GD\", \"Spacing\", \"Spacing\", \"Spacing\",", "stat1_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(),", "= stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread =", "into two experiments exp1_GD = dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]] #", "stat1_Spacing.index = [\"Best\", \"Median\", \"Std.\"] stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(),", "[stat1_GD, stat1_Spacing, stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall = pd.concat( [stat2_GD,", "dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into two experiments exp1_Spacing =", "list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) # TODO:", "[exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"] # find", "= list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) #", "best2_Delta = list(meds2_Delta.index[:2]) 
print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) # print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1])", "\"IGD\", \"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index ] index = pd.MultiIndex.from_arrays(arrays, names=[\"Metric\",", "best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) # TODO: check error #", "# find best and second best algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values(", "\"Delta\", \"Delta\", \"Delta\", \"IGD\", \"IGD\", \"IGD\", \"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index", "best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market), best2_MaxSpread[0], best2_MaxSpread[1]) # TODO: check error", "\"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\", \"Delta\", \"IGD\", \"IGD\", \"IGD\", \"Hypervolume\",", "pd.options.display.float_format = '{:.2e}'.format stat1_overall = pd.concat( [stat1_GD, stat1_Spacing, stat1_MaxSpread, stat1_Delta,", "list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1]) # TODO:", "= pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays =", "exp1_GD.median(), exp1_GD.std()]) stat1_GD.index = [\"Best\", \"Median\", \"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(),", "file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) # split into two experiments exp1_GD", "exp1_Spacing.std()]) stat1_Spacing.index = [\"Best\", \"Median\", \"Std.\"] stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(),", "pd import sys markets = [\"hangseng\", \"dax\", \"ftse\", \"sp\", \"nikkei\"]", "file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) # split into two experiments exp1_Spacing", "\"Median\", \"Std.\"] stat2_Delta = pd.DataFrame( [exp2_Delta.min(), 
exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index =", "as np import pandas as pd import sys markets =", "= pd.DataFrame( [exp1_MaxSpread.max(), exp1_MaxSpread.median(), exp1_MaxSpread.std()]) stat1_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"]", "error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format stat1_overall = pd.concat( [stat1_GD, stat1_Spacing,", "two experiments exp1_Delta = dat[dat.columns[:5]] exp2_Delta = dat[dat.columns[5:]] # calculate", "file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into two experiments exp1_Delta", "np import pandas as pd import sys markets = [\"hangseng\",", "exp1_Hypervolume.std()]) stat1_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(),", "= stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market),", "exp1_Delta.std()]) stat1_Delta.index = [\"Best\", \"Median\", \"Std.\"] stat2_Delta = pd.DataFrame( [exp2_Delta.min(),", "\"Delta\", \"IGD\", \"IGD\", \"IGD\", \"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index ] index", "experiments exp1_Hypervolume = dat[dat.columns[:5]] exp2_Hypervolume = dat[dat.columns[5:]] # calculate statistics", "pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index = [\"Best\", \"Median\", \"Std.\"] # find", "exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread = dat[dat.columns[5:]] # calculate statistics stat1_MaxSpread", "find best and second best algorithm meds1_Spacing = stat1_Spacing.loc[\"Median\"].sort_values() best1_Spacing", "= pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index = [\"Best\", \"Median\", \"Std.\"]", "# print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) # TODO: check error # read", 
"best2_Spacing[0], best2_Spacing[1]) # TODO: check error # read MaxSpread data", "pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index = [\"Best\", \"Median\", \"Std.\"] stat2_Delta", "= stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD = stat2_IGD.loc[\"Median\"].sort_values() best2_IGD =", "algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values()", "best1_GD = list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market),", "# TODO: check error # read Spacing data file dat", "numpy as np import pandas as pd import sys markets", "pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays = [[\"GD\",", "# calculate statistics stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index =", "\"Median\", \"Std.\"] stat2_Spacing = pd.DataFrame( [exp2_Spacing.min(), exp2_Spacing.median(), exp2_Spacing.std()]) stat2_Spacing.index =", "\"Std.\"] stat2_MaxSpread = pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\",", "# print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) # TODO: check error # read", "\"GD\", \"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\", \"Delta\",", "# split into two experiments exp1_Hypervolume = dat[dat.columns[:5]] exp2_Hypervolume =", "\"Median\", \"Std.\"] stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index =", "print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1]) # TODO:", "\"IGD\", \"IGD\", \"IGD\", 
\"Hypervolume\", \"Hypervolume\", \"Hypervolume\"], stat1_overall.index ] index =", "\"Std.\"] # find best and second best algorithm meds1_GD =", "# calculate statistics stat1_Hypervolume = pd.DataFrame( [exp1_Hypervolume.max(), exp1_Hypervolume.median(), exp1_Hypervolume.std()]) stat1_Hypervolume.index", "list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1]) # print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) # TODO:", "stat1_Delta = pd.DataFrame( [exp1_Delta.min(), exp1_Delta.median(), exp1_Delta.std()]) stat1_Delta.index = [\"Best\", \"Median\",", "and second best algorithm meds1_Hypervolume = stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume =", "= [\"Best\", \"Median\", \"Std.\"] stat2_Delta = pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()])", "TODO: check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format = '{:.2e}'.format stat1_overall = pd.concat(", "= [\"hangseng\", \"dax\", \"ftse\", \"sp\", \"nikkei\"] market = markets[int(sys.argv[1])-1] #", "= pd.DataFrame( [exp2_MaxSpread.max(), exp2_MaxSpread.median(), exp2_MaxSpread.std()]) stat2_MaxSpread.index = [\"Best\", \"Median\", \"Std.\"]", "= pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into two experiments exp1_Hypervolume = dat[dat.columns[:5]]", "# split into two experiments exp1_Delta = dat[dat.columns[:5]] exp2_Delta =", "= stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread = list(meds2_MaxSpread.index[:2]) print(\"{}.MaxSpread:\".format(market), best1_MaxSpread[0], best1_MaxSpread[1]) # print(\"{}.MaxSpread:\".format(market),", "meds1_MaxSpread = stat1_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best1_MaxSpread = list(meds1_MaxSpread.index[:2]) meds2_MaxSpread = stat2_MaxSpread.loc[\"Median\"].sort_values(ascending=False) best2_MaxSpread", "best1_Delta = list(meds1_Delta.index[:2]) 
meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market),", "stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0],", "= pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into two experiments exp1_Delta = dat[dat.columns[:5]]", "exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\", \"Median\", \"Std.\"] # find best and", "] index = pd.MultiIndex.from_arrays(arrays, names=[\"Metric\", \"\"]) stat1_overall.index = index stat2_overall.index", "best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market), best2_Spacing[0], best2_Spacing[1])", "stat2_Spacing.index = [\"Best\", \"Median\", \"Std.\"] # find best and second", "\"MaxSpread\", \"Delta\", \"Delta\", \"Delta\", \"IGD\", \"IGD\", \"IGD\", \"Hypervolume\", \"Hypervolume\", \"Hypervolume\"],", "file dat = pd.read_csv(\"./num_res/{}.Hypervolume.csv\".format(market)) # split into two experiments exp1_Hypervolume", "# read Delta data file dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split", "meds2_Delta = stat2_Delta.loc[\"Median\"].sort_values() best2_Delta = list(meds2_Delta.index[:2]) print(\"{}.Delta:\".format(market), best1_Delta[0], best1_Delta[1]) #", "= dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]] # calculate statistics stat1_GD =", "print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) # TODO: check error # read IGD", "dat[dat.columns[5:]] # calculate statistics stat1_IGD = pd.DataFrame([exp1_IGD.min(), exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index", "best2_MaxSpread[0], best2_MaxSpread[1]) # TODO: check error # read Delta data", "file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into 
two experiments exp1_IGD", "# print(\"{}.Delta:\".format(market), best2_Delta[0], best2_Delta[1]) # TODO: check error # read", "# TODO: check error # read MaxSpread data file dat", "exp1_IGD.median(), exp1_IGD.std()]) stat1_IGD.index = [\"Best\", \"Median\", \"Std.\"] stat2_IGD = pd.DataFrame([exp2_IGD.min(),", "# best2_Hypervolume[0], best2_Hypervolume[1]) # TODO: check error print(\"{}\\n----------------------------------------------\".format(market)) pd.options.display.float_format =", "= pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split into two experiments exp1_IGD = dat[dat.columns[:5]]", "print(\"{}.GD:\".format(market), best2_GD[0], best2_GD[1]) # TODO: check error # read Spacing", "statistics stat1_Spacing = pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index = [\"Best\",", "exp2_Delta.std()]) stat2_Delta.index = [\"Best\", \"Median\", \"Std.\"] # find best and", "markets = [\"hangseng\", \"dax\", \"ftse\", \"sp\", \"nikkei\"] market = markets[int(sys.argv[1])-1]", "stat1_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best1_Hypervolume = list(meds1_Hypervolume.index[:2]) meds2_Hypervolume = stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume", "best algorithm meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values() best1_IGD = list(meds1_IGD.index[:2]) meds2_IGD =", "stat2_Hypervolume]) arrays = [[\"GD\", \"GD\", \"GD\", \"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\",", "stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), exp2_Hypervolume.std()]) stat2_Hypervolume.index = [\"Best\", \"Median\",", "= stat2_Spacing.loc[\"Median\"].sort_values() best2_Spacing = list(meds2_Spacing.index[:2]) print(\"{}.Spacing:\".format(market), best1_Spacing[0], best1_Spacing[1]) # print(\"{}.Spacing:\".format(market),", "[\"Best\", \"Median\", \"Std.\"] stat2_Hypervolume = pd.DataFrame( [exp2_Hypervolume.max(), exp2_Hypervolume.median(), 
exp2_Hypervolume.std()]) stat2_Hypervolume.index", "ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), #", "= pd.concat( [stat1_GD, stat1_Spacing, stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall =", "\"GD\", \"GD\", \"Spacing\", \"Spacing\", \"Spacing\", \"MaxSpread\", \"MaxSpread\", \"MaxSpread\", \"Delta\", \"Delta\",", "import sys markets = [\"hangseng\", \"dax\", \"ftse\", \"sp\", \"nikkei\"] market", "stat1_IGD.index = [\"Best\", \"Median\", \"Std.\"] stat2_IGD = pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()])", "second best algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD = list(meds1_GD.index[:2]) meds2_GD", "best2_MaxSpread[1]) # TODO: check error # read Delta data file", "stat1_GD.index = [\"Best\", \"Median\", \"Std.\"] stat2_GD = pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()])", "exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index = [\"Best\", \"Median\", \"Std.\"] # find best", "markets[int(sys.argv[1])-1] # read GD data file dat = pd.read_csv(\"./num_res/{}.GD.csv\".format(market)) #", "into two experiments exp1_Hypervolume = dat[dat.columns[:5]] exp2_Hypervolume = dat[dat.columns[5:]] #", "= pd.DataFrame([exp2_IGD.min(), exp2_IGD.median(), exp2_IGD.std()]) stat2_IGD.index = [\"Best\", \"Median\", \"Std.\"] #", "best and second best algorithm meds1_IGD = stat1_IGD.loc[\"Median\"].sort_values() best1_IGD =", "pd.DataFrame( [exp1_Spacing.min(), exp1_Spacing.median(), exp1_Spacing.std()]) stat1_Spacing.index = [\"Best\", \"Median\", \"Std.\"] stat2_Spacing", "pd.concat( [stat1_GD, stat1_Spacing, stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall = pd.concat(", "best and second best algorithm meds1_GD = stat1_GD.loc[\"Median\"].sort_values() best1_GD =", "split into two experiments exp1_GD = 
dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]]", "# read IGD data file dat = pd.read_csv(\"./num_res/{}.IGD.csv\".format(market)) # split", "experiments exp1_GD = dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]] # calculate statistics", "split into two experiments exp1_IGD = dat[dat.columns[:5]] exp2_IGD = dat[dat.columns[5:]]", "best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0],", "dat[dat.columns[:5]] exp2_GD = dat[dat.columns[5:]] # calculate statistics stat1_GD = pd.DataFrame([exp1_GD.min(),", "pd.DataFrame( [exp2_Delta.min(), exp2_Delta.median(), exp2_Delta.std()]) stat2_Delta.index = [\"Best\", \"Median\", \"Std.\"] #", "dat = pd.read_csv(\"./num_res/{}.Delta.csv\".format(market)) # split into two experiments exp1_Delta =", "best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market), # best2_Hypervolume[0], best2_Hypervolume[1]) # TODO: check", "stat2_Hypervolume.loc[\"Median\"].sort_values( ascending=False) best2_Hypervolume = list(meds2_Hypervolume.index[:2]) print(\"{}.Hypervolume:\".format(market), best1_Hypervolume[0], best1_Hypervolume[1]) # print(\"{}.Hypervolume:\".format(market),", "pd.read_csv(\"./num_res/{}.MaxSpread.csv\".format(market)) # split into two experiments exp1_MaxSpread = dat[dat.columns[:5]] exp2_MaxSpread", "= pd.DataFrame([exp2_GD.min(), exp2_GD.median(), exp2_GD.std()]) stat2_GD.index = [\"Best\", \"Median\", \"Std.\"] #", "exp2_MaxSpread = dat[dat.columns[5:]] # calculate statistics stat1_MaxSpread = pd.DataFrame( [exp1_MaxSpread.max(),", "stat1_MaxSpread, stat1_Delta, stat1_IGD, stat1_Hypervolume]) stat2_overall = pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread,", "stat1_Hypervolume]) stat2_overall = pd.concat( [stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume])", "meds2_IGD = 
stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) #", "= list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0], best2_IGD[1]) #", "[stat2_GD, stat2_Spacing, stat2_MaxSpread, stat2_Delta, stat2_IGD, stat2_Hypervolume]) arrays = [[\"GD\", \"GD\",", "dat[dat.columns[5:]] # calculate statistics stat1_GD = pd.DataFrame([exp1_GD.min(), exp1_GD.median(), exp1_GD.std()]) stat1_GD.index", "stat2_IGD.loc[\"Median\"].sort_values() best2_IGD = list(meds2_IGD.index[:2]) print(\"{}.IGD:\".format(market), best1_IGD[0], best1_IGD[1]) # print(\"{}.IGD:\".format(market), best2_IGD[0],", "error # read Spacing data file dat = pd.read_csv(\"./num_res/{}.Spacing.csv\".format(market)) #", "list(meds1_GD.index[:2]) meds2_GD = stat2_GD.loc[\"Median\"].sort_values() best2_GD = list(meds2_GD.index[:2]) print(\"{}.GD:\".format(market), best1_GD[0], best1_GD[1])" ]
[ "np.sqrt(cov(x, x, w) * cov(y, y, w)) print('Completed: Computed weighted", "int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])))", "for i in range(var.shape[0]): for j in range(var.shape[1]): varq =", "function!') ### Import modules import numpy as np ### Reshape", "'yes' if mask == 'yes': latq = np.where(lats > 40)[0]", "varq = var[:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask =", "wave def cov(x, y, w): \"\"\"Weighted Covariance\"\"\" wcov = np.sum(w", "Parameters ---------- varx : 2d array vary : 2d array", "for weights lon2,lat2 = np.meshgrid(lons,lats) ### Create 2d array of", "djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] =", "vertical height (17 pressure coordinate levels) Parameters ---------- varx :", ": 3d array or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_djf", ": 1d array of levels lons : 1d array of", "np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq,", ": 3d array vary : 3d array Returns ------- stat", "meanvar = np.empty((var.shape[0],var.shape[1])) for i in range(var.shape[0]): for j in", "range(var.shape[0]): for i in range(var.shape[1]): for j in range(var.shape[2]): varq", "meanvar ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial correlation", "print('\\n>>> Using 
calc_weightedAve function!') ### Import modules import numpy as", "[ens,year,month,lat,lon] into [ens,year,month] Parameters ---------- var : 5d,4d,3d array of", "print('Completed: Weighted variable average!') print('*Completed: Finished calc_weightedAve function!') return meanvar", "or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_dj : 3d array", "LATITUDES!') ### Create 2d meshgrid for weights lon2,lat2 = np.meshgrid(lons,lats)", "weighted arguement in function!') print('*Completed: Finished calc_SpatialCorr function!') return corrcoef", "int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i", "np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j]", "Import modules import numpy as np if weight == 'yes':", "or [year,month,lev,lat,lon] lat : 1d numpy array latitudes lon :", "Create 2d meshgrid for weights lon2,lat2 = np.meshgrid(lons,lats) ### Create", "range(var.shape[2]): varq = var[ens,i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask", "as sts ### 2-independent sample t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ###", "gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0.,", "np.sum(w) print('Completed: Computed weighted covariance!') return wcov def corr(x, y,", "return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root", "= np.empty((var.shape[0])) varq = var[:,:] mask = np.isfinite(varq) & np.isfinite(lats)", "Returns ------- rmse : 1d array Usage ----- rmse =", "= var[i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) 
varmask = varq[mask]", "[3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight)", "weighted correlation!') return wcor corrcoef = corr(varx,vary,gw) elif weight ==", "array lons : 1d array of latitude weight : string", "or [year,lev,lat,lon] vary_djf : 3d array [year,lat,lon] or [year,lev,lat,lon] Usage", ": string (yes or no) Returns ------- rmse : 1d", "# Computed weighted correlation coefficient ### Create 2d meshgrid for", "np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 2: meanvar = np.empty((var.shape[0])) varq =", "### Create 2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) if", "Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorrHeight function!')", "p-value Usage ----- stat,pvalue = calc_ttest(varx,vary) \"\"\" print('\\n>>> Using calc_ttest", "Reshape for 4d variables elif level == 'profile': varxravel =", "calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average", "0 if i >= 12: counter = i//12 djappendh =", "no) Returns ------- rmse : 1d array Usage ----- rmse", "np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask) \\ /np.sum(areamask) elif var.ndim == 4:", "vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12): counter =", "int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12): counter = 0 if", "= calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ### Import modules", "vary : 2d array levs : 1d array of levels", "level == 'surface': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) 
varyravel =", "- m(y, w))) / np.sum(w) print('Completed: Computed weighted covariance!') return", "Calculates root mean square weighted average Parameters ---------- varx :", "Levels)!') return wave def cov(x, y, w): \"\"\"Weighted Covariance\"\"\" wcov", "spatial correlation from pearson correlation coefficient for grids over vertical", "== 4: meanvar = np.empty((var.shape[0],var.shape[1])) for i in range(var.shape[0]): for", "\\ /np.sum(areamask) elif var.ndim == 4: meanvar = np.empty((var.shape[0],var.shape[1])) for", "1d array of levels lons : 1d array of latitude", "average for various dimensional arrays if var.ndim == 5: meanvar", "= cov(x, y, w) / np.sqrt(cov(x, x, w) * cov(y,", "calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7]", "varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in", "dimensional arrays if var.ndim == 5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for", "in range(var.shape[0]): varq = var[i,:,:] mask = np.isfinite(varq) & np.isfinite(lats)", "Root mean square error from sklearn (not weighted) rmse =", "= np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12): counter =", "print('\\n>>> Using calc_spatialCorrHeightLev function!') ### Import modules import numpy as", "axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d", "lats[latq] varx = varx[latq,:] vary = vary[latq,:] print('MASKING LATITUDES!') ###", "np.cos(np.deg2rad(lats[mask])) meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 2: meanvar =", "np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar", "or no) 
Returns ------- corrcoef : 1d array of correlation", "np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf,", "sq_err = (varx - vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight", "import mean_squared_error if weight == 'yes': # Computed weighted correlation", ": 2d array of latitudes Returns ------- meanvar : weighted", "[year,month,lat,lon] or [year,month,lev,lat,lon] vary : 4d array or 5d array", "print(ValueError('Selected wrong height - (surface or profile!)!')) print('Completed: Organized data", "& np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i] =", ": string (yes or no) Returns ------- corrcoef : 1d", "/ np.sum(w) print('Completed: Computed weighted average!') return wave def cov(x,", "* w) / np.sum(w) print('Completed: Computed weighted average!') return wave", "(pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>> Using", "elif levelq == 'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 =", "correlation coefficient for grids over vertical height (17 pressure coordinate", "pressure coordinate levels). 
Change the weighting for different level correlations", "array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\"", "= np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x", "5d array [year,month,lat,lon] or [year,month,lev,lat,lon] vary : 4d array or", "= np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has the wrong dimensions!')) print('Completed: Weighted", "integer number of levels Returns ------- varx_dj : 3d array", "weighted average Parameters ---------- varx : 2d array vary :", "as np if weight == 'yes': # Computed weighted correlation", "weights based on latitude gw = np.cos(np.deg2rad(lat2)) ### Calculate rmse", "array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] vary : 4d", "from pearson correlation coefficient Parameters ---------- varx : 2d array", "varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj", "Functions are useful untilities for SITperturb experiments Notes ----- Author", "np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface or", "3d array or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_dj :", "function!') ### Import modules import numpy as np from sklearn.metrics", "corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root mean", "calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial correlation from pearson correlation coefficient Parameters", "varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) 
meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim ==", "np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 3: meanvar = np.empty((var.shape[0])) for i", "= np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12):", "(ON,DJ,FM)!') print('*Completed: Finished calcDecJan function!') return varx_dj,vary_dj ############################################################################### ############################################################################### ###############################################################################", "over vertical height (17 pressure coordinate levels) Parameters ---------- varx", "Computed weighted correlation (17 P Levels)!') return wcor corrcoef =", "print(ValueError('Variable has the wrong dimensions!')) print('Completed: Weighted variable average!') print('*Completed:", "== 'surface': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(),", "i in range(var.shape[1]): for j in range(var.shape[2]): varq = var[ens,i,j,:,:]", "profile!)!')) print('Completed: Organized data by months (ON,DJ,FM)!') print('*Completed: Finished calcDecJan", "number of levels Returns ------- varx_dj : 3d array or", "string (yes or no) levelq : string (all, tropo, strato)", "[year,month,lev,lat,lon] lat : 1d numpy array latitudes lon : 1d", "coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>>", "ValueError('Wrong weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeight function!') return", "wrong dimensions!')) print('Completed: Weighted variable average!') print('*Completed: Finished calc_weightedAve function!')", "Usage ----- rmse = calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_RMSE function!')", "weighted average for 3d,2d,1d array 
Usage ----- meanvar = calc_weightedAve(var,lats)", ": 2d array vary : 2d array lons : 1d", "varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf =", "or profile!)!')) print('Completed: Organized data by months (DJF)!') print('*Completed: Finished", "meanvar = np.empty((var.shape[0])) for i in range(var.shape[0]): varq = var[i,:,:]", "number of levels Returns ------- varx_djf : 3d array or", "calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January-February Parameters ---------- varx", "np from sklearn.metrics import mean_squared_error if weight == 'yes': #", "2d array levs : 1d array of levels lons :", "= calc_ttest(varx,vary) \"\"\" print('\\n>>> Using calc_ttest function!') ### Import modules", "np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i]", "wcov = np.sum(w * (x - m(x, w)) * (y", "= np.meshgrid(lons,levs) if levelq == 'all': ### Create 2d array", "variable average!') print('*Completed: Finished calc_weightedAve function!') return meanvar ############################################################################### ###############################################################################", "[2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight)", "print('\\n>>> Using calc_spatialCorrHeight function!') ### Import modules import numpy as", "grids over vertical height (17 pressure coordinate levels) Parameters ----------", "correlation from pearson correlation coefficient Parameters ---------- varx : 2d", "variable (surface or profile) levsq : integer number of levels", "Calculate rmse sq_err = (varx - vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw))", "root mean square weighted 
average Parameters ---------- varx : 2d", "= np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i", "varx_dj : 3d array or 4d array [year,lat,lon] or [year,lev,lat,lon]", "NON-weighted correlation!') else: ValueError('Wrong weighted arguement in function!') print('*Completed: Finished", "np.meshgrid(lons,gwq) elif levelq == 'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2", "2d array lats : 1d array lons : 1d array", "3d array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq)", "'no': ### Root mean square error from sklearn (not weighted)", "or [year,lev,lat,lon] Usage ----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using", "of levels Returns ------- varx_dj : 3d array or 4d", "(surface or profile!)!')) print('Completed: Organized data by months (DJF)!') print('*Completed:", "var[:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask", "= lats[latq] varx = varx[latq,:] vary = vary[latq,:] print('MASKING LATITUDES!')", "arguement in function!') print('*Completed: Finished calc_RMSE function!') return rmse ###############################################################################", "= np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface", "correlation!') else: ValueError('Wrong weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeightLev", "1d numpy array longitudes level : string Height of variable", "import numpy as np ### Reshape for 3d variables if", "############################################################################### ############################################################################### def calc_indttest(varx,vary): \"\"\" 
Function calculates statistical difference for", "3d,2d,1d array Usage ----- meanvar = calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using", "2d array of weights based on latitude gw = np.cos(np.deg2rad(lat2))", "1d array of latitude weight : string (yes or no)", "mean_squared_error if weight == 'yes': # Computed weighted correlation coefficient", "= np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12): counter = 0", "function!') print('*Completed: Finished calc_SpatialCorr function!') return corrcoef ############################################################################### ############################################################################### ###############################################################################", "covariance (17 P Levels)!') return wcov def corr(x, y, w):", "latitudes lon : 1d numpy array longitudes level : string", "modules import numpy as np ### Calculate weighted average for", "stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at 95% confidence level pvalue[np.where(pvalue", "Notes ----- Author : <NAME> Date : 13 August 2017", "latitude gw = np.cos(np.deg2rad(lat2)) def m(x, w): \"\"\"Weighted Mean\"\"\" wave", ">= 12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf =", "varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]),", "mask = 'yes' if mask == 'yes': latq = np.where(lats", "print('Completed: Computed weighted average!') return wave def cov(x, y, w):", "varq = var[i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask =", "2d array lons : 1d array of latitude weight :", "wcov def corr(x, y, w): \"\"\"Weighted Correlation\"\"\" wcor = cov(x,", "\"\"\" print('\\n>>> Using calc_RMSE function!') ### Import modules import numpy", "= 
np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh,", "on latitude gw = np.cos(np.deg2rad(lat2)) ### Calculate rmse sq_err =", "[year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>>", "Finished calc_RMSE function!') return rmse ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight):", "longitudes level : string Height of variable (surface or profile)", "levelq == 'all': ### Create 2d array of weights based", "varx_djf : 3d array or 4d array [year,lat,lon] or [year,lev,lat,lon]", "(int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))", "---------- varx : 2d array vary : 2d array lats", "covariance!') return wcov def corr(x, y, w): \"\"\"Weighted Correlation\"\"\" wcor", "np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask)", "or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_djf : 3d array", "(yes or no) Returns ------- rmse : 1d array Usage", "calc_weightedAve(var,lats): \"\"\" Area weights sit array 5d [ens,year,month,lat,lon] into [ens,year,month]", "gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1.", "= calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorr function!') ### Import modules", "counter = i//12 djappendh = 
np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:]", "for weights lon2,lev2 = np.meshgrid(lons,levs) if levelq == 'all': ###", "print('*Completed: Finished calc_weightedAve function!') return meanvar ############################################################################### ############################################################################### ############################################################################### def", "3: meanvar = np.empty((var.shape[0])) for i in range(var.shape[0]): varq =", "array of correlation coefficient (pearson r) Usage ----- corrcoef =", "levels) Parameters ---------- varx : 2d array vary : 2d", "2d meshgrid for weights lon2,lat2 = np.meshgrid(lons,lats) ### Create 2d", "varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for", "Calculates spatial correlation from pearson correlation coefficient for grids over", "[6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\"", "\"\"\" Area weights sit array 5d [ens,year,month,lat,lon] into [ens,year,month] Parameters", "5d,4d,3d array of a gridded variable lats : 2d array", "& np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j] =", "Usage ----- stat,pvalue = calc_ttest(varx,vary) \"\"\" print('\\n>>> Using calc_ttest function!')", "np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:]", "pearson correlation coefficient Parameters ---------- varx : 2d array vary", "= var[ens,i,j,:,:] 
mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask]", ": 5d,4d,3d array of a gridded variable lats : 2d", "calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6]", "= 0 if i >= 12: counter = i//12 djappendh", "13 August 2017 Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3]", "djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:]", "np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted argument in", "sklearn (not weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted correlation!')", "### Create 2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) ###", "---------- varx : 2d array vary : 2d array levs", "sample t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at 95% confidence", "= np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf =", "average for December-January Parameters ---------- varx : 4d array or", "array [year,lat,lon] or [year,lev,lat,lon] vary_djf : 3d array [year,lat,lon] or", "varq = var[ens,i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask =", "y, w): \"\"\"Weighted Covariance\"\"\" wcov = np.sum(w * (x -", "array Usage ----- meanvar = calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using calc_weightedAve", "= np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), 
int(lat.shape[0]),int(lon.shape[0]))) varx_dj", ": 4d array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] vary", "corr(varx,vary,gw) elif weight == 'no': ### Correlation coefficient from numpy", "np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. ,1.,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\"", "(all, tropo, strato) Returns ------- corrcoef : 1d array of", "calculated t-statistic pvalue = two-tailed p-value Usage ----- stat,pvalue =", "[8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for", "calc_ttest function!') return stat,pvalue ############################################################################### ############################################################################### ############################################################################### def calc_weightedAve(var,lats): \"\"\"", "= np.meshgrid(lons,gwq) elif levelq == 'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. 
,1.,1.])", "(int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for", "meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) if levelq == 'all':", "weight == 'no': ### Root mean square error from sklearn", "1d array Usage ----- rmse = calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using", "= varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim", "weights based on latitude gw = np.cos(np.deg2rad(lat2)) def m(x, w):", "### Import modules import numpy as np ### Calculate weighted", "(3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface or profile!)!'))", "int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) )", "months (DJF)!') print('*Completed: Finished calcDecJanFeb function!') return varx_djf,vary_djf ############################################################################### ###############################################################################", "calculates statistical difference for 2 independent sample t-test Parameters ----------", "------- meanvar : weighted average for 3d,2d,1d array Usage -----", "wrong height - (surface or profile!)!')) print('Completed: Organized data by", "Author : <NAME> Date : 13 August 2017 Usage -----", "print('*Completed: Finished calcDecJanFeb function!') return varx_djf,vary_djf ############################################################################### ############################################################################### ############################################################################### def", "= var[i,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask = 
varq[mask]", "weighted correlation coefficient ### Create 2d meshgrid for weights lon2,lev2", "return varx_djf,vary_djf ############################################################################### ############################################################################### ############################################################################### def calc_indttest(varx,vary): \"\"\" Function calculates", "- vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight == 'no': ###", "or [year,lev,lat,lon] Usage ----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using", "vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables", "w) * cov(y, y, w)) print('Completed: Computed weighted correlation!') return", "t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at 95% confidence level", "are useful untilities for SITperturb experiments Notes ----- Author :", "Usage ----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!')", "np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:]", "weights lon2,lev2 = np.meshgrid(lons,levs) ### Create 2d array of weights", "np.sum(w) print('Completed: Computed weighted average!') return wave def cov(x, y,", "coefficient for grids over vertical height (17 pressure coordinate levels).", "experiments Notes ----- Author : <NAME> Date : 13 August", "correlation!') else: ValueError('Wrong weighted arguement in function!') print('*Completed: Finished calc_SpatialCorr", "calc_RMSE function!') return rmse ############################################################################### 
############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\"", "wcor corrcoef = corr(varx,vary,gw) elif weight == 'no': ### Correlation", "print('\\n>>> Using calc_ttest function!') ### Import modules import numpy as", "w): \"\"\"Weighted Correlation\"\"\" wcor = cov(x, y, w) / np.sqrt(cov(x,", "ens in range(var.shape[0]): for i in range(var.shape[1]): for j in", "rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight == 'no': ### Root mean", "Correlation\"\"\" wcor = cov(x, y, w) / np.sqrt(cov(x, x, w)", "print('*Completed: Finished calc_SpatialCorr function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def", "############################################################################### ############################################################################### ############################################################################### def calc_indttest(varx,vary): \"\"\" Function calculates statistical difference", "Function calculates average for December-January Parameters ---------- varx : 4d", "\"\"\" print('\\n>>> Using calcDecJan function!') ### Import modules import numpy", "ValueError('Wrong weighted arguement in function!') print('*Completed: Finished calc_RMSE function!') return", "int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])))", "latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq", "corrcoef = 
calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorrHeight function!') ### Import", "meanvar[ens,i,j] = np.nansum(varmask*areamask) \\ /np.sum(areamask) elif var.ndim == 4: meanvar", "coefficient ### mask mask = 'yes' if mask == 'yes':", "w)) print('Completed: Computed weighted correlation (17 P Levels)!') return wcor", "tropo, strato) Returns ------- corrcoef : 1d array of correlation", "array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] lat : 1d", "Finished calc_SpatialCorrHeight function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq):", "grids over vertical height (17 pressure coordinate levels). Change the", "array of latitude weight : string (yes or no) levelq", "m(x, w)) * (y - m(y, w))) / np.sum(w) print('Completed:", "= calculated t-statistic pvalue = two-tailed p-value Usage ----- stat,pvalue", "calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_RMSE function!') ### Import modules import", "in function!') print('*Completed: Finished calc_RMSE function!') return rmse ############################################################################### ###############################################################################", "(2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for", "coefficient ### Create 2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs)", "### mask mask = 'yes' if mask == 'yes': latq", "varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj =", "### Import modules 
import numpy as np if weight ==", "stat,pvalue ############################################################################### ############################################################################### ############################################################################### def calc_weightedAve(var,lats): \"\"\" Area weights sit", "int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface or profile!)!')) print('Completed:", "array levs : 1d array of levels lons : 1d", "varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in", "np.where(lats > 40)[0] lats = lats[latq] varx = varx[latq,:] vary", "= vary[latq,:] print('MASKING LATITUDES!') ### Create 2d meshgrid for weights", "array Usage ----- rmse = calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_RMSE", "int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height", ">= 12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf", "for various dimensional arrays if var.ndim == 5: meanvar =", "int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) )", "rmse = calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_RMSE function!') ### Import", "np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf,", "numpy as np if weight == 'yes': # Computed weighted", "profile) levsq : integer number of levels Returns ------- varx_dj", "Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) 
\"\"\" print('\\n>>> Using calc_spatialCorrHeightLev function!')", "\"\"\" Function calculates average for December-January-February Parameters ---------- varx :", ": 4d array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] lat", ": 3d array Returns ------- stat = calculated t-statistic pvalue", "error from sklearn (not weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed", "np.empty((var.shape[0],var.shape[1])) for i in range(var.shape[0]): for j in range(var.shape[1]): varq", "in range(0,varxravel.shape[0]-12,12): counter = 0 if i >= 12: counter", "import numpy as np ### Calculate weighted average for various", "gw = np.cos(np.deg2rad(lat2)) ### Calculate rmse sq_err = (varx -", "= np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i", "############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial correlation from pearson", "def corr(x, y, w): \"\"\"Weighted Correlation\"\"\" wcor = cov(x, y,", "0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'strato': gwq =", "function!') print('*Completed: Finished calc_RMSE function!') return rmse ############################################################################### ############################################################################### ###############################################################################", "Function calculates average for December-January-February Parameters ---------- varx : 4d", "0.05)] = 1. 
print('*Completed: Finished calc_ttest function!') return stat,pvalue ###############################################################################", "Levels)!') return wcor corrcoef = corr(varx,vary,gw) elif weight == 'no':", "calc_spatialCorrHeightLev function!') ### Import modules import numpy as np if", "in range(var.shape[0]): for j in range(var.shape[1]): varq = var[i,j,:,:] mask", ": 2d array vary : 2d array levs : 1d", "dimensions!')) print('Completed: Weighted variable average!') print('*Completed: Finished calc_weightedAve function!') return", "----- Author : <NAME> Date : 13 August 2017 Usage", "= np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'tropo':", "if i >= 12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:])", "array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\"", "return stat,pvalue ############################################################################### ############################################################################### ############################################################################### def calc_weightedAve(var,lats): \"\"\" Area weights", "np.meshgrid(lons,levs) ### Create 2d array of weights based on latitude", "varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim ==", "############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial correlation from pearson", "= np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. 
,1.,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted", "height - (surface or profile!)!')) print('Completed: Organized data by months", "Computed weighted average!') return wave def cov(x, y, w): \"\"\"Weighted", "np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12): counter = 0", "pvalue[np.where(pvalue >= 0.05)] = np.nan pvalue[np.where(pvalue < 0.05)] = 1.", ": weighted average for 3d,2d,1d array Usage ----- meanvar =", "axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d", "Calculates spatial correlation from pearson correlation coefficient Parameters ---------- varx", "weights lon2,lat2 = np.meshgrid(lons,lats) ### Create 2d array of weights", "weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeight function!') return corrcoef", "variables if level == 'surface': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0])))", "= np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else:", "= np.cos(np.deg2rad(lat2)) ### Calculate rmse sq_err = (varx - vary)**2", "(x - m(x, w)) * (y - m(y, w))) /", "array or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_djf : 3d", "############################################################################### ############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root mean square", "array of weights based on latitude gw = np.cos(np.deg2rad(lat2)) ###", "no) levelq : string (all, tropo, strato) Returns ------- corrcoef", "numpy array latitudes lon : 1d numpy array longitudes level", 
"0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\" wave", "latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x,", "= varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim", "December-January-February Parameters ---------- varx : 4d array or 5d array", "= i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh =", "- (surface or profile!)!')) print('Completed: Organized data by months (ON,DJ,FM)!')", "calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ### Import modules import", "average (17 P Levels)!') return wave def cov(x, y, w):", "(int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq,", "np ### Reshape for 3d variables if level == 'surface':", "as np from sklearn.metrics import mean_squared_error if weight == 'yes':", "def calc_indttest(varx,vary): \"\"\" Function calculates statistical difference for 2 independent", "numpy as np ### Reshape for 3d variables if level", "Finished calc_weightedAve function!') return meanvar ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight):", "weighted average!') return wave def cov(x, y, w): \"\"\"Weighted Covariance\"\"\"", "Covariance\"\"\" wcov = np.sum(w * (x - m(x, w)) *", "or [year,lev,lat,lon] vary_dj : 3d array [year,lat,lon] or [year,lev,lat,lon] Usage", "3d variables if level == 
'surface': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12),", "has the wrong dimensions!')) print('Completed: Weighted variable average!') print('*Completed: Finished", "varx : 2d array vary : 2d array lats :", "from pearson correlation coefficient for grids over vertical height (17", "in range(var.shape[0]): for i in range(var.shape[1]): for j in range(var.shape[2]):", "level == 'profile': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel =", "------- varx_dj : 3d array or 4d array [year,lat,lon] or", "### Root mean square error from sklearn (not weighted) rmse", "40)[0] lats = lats[latq] varx = varx[latq,:] vary = vary[latq,:]", "calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8]", "calculates average for December-January-February Parameters ---------- varx : 4d array", "Computed weighted covariance (17 P Levels)!') return wcov def corr(x,", "np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in", "calc_indttest(varx,vary): \"\"\" Function calculates statistical difference for 2 independent sample", "= np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf =", "if weight == 'yes': # Computed weighted correlation coefficient ###", "else: ValueError('Wrong weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeight function!')", "< 0.05)] = 1. 
print('*Completed: Finished calc_ttest function!') return stat,pvalue", "### Reshape for 3d variables if level == 'surface': varxravel", "array Returns ------- stat = calculated t-statistic pvalue = two-tailed", "ValueError('Wrong weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeightLev function!') return", "or no) Returns ------- rmse : 1d array Usage -----", "### Create 2d meshgrid for weights lon2,lat2 = np.meshgrid(lons,lats) ###", "else: ValueError('Wrong weighted arguement in function!') print('*Completed: Finished calc_SpatialCorr function!')", "calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root mean square weighted average Parameters ----------", "\"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January Parameters", "= np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] =", "latitudes Returns ------- meanvar : weighted average for 3d,2d,1d array", "= np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 2: meanvar = np.empty((var.shape[0])) varq", "months (ON,DJ,FM)!') print('*Completed: Finished calcDecJan function!') return varx_dj,vary_dj ############################################################################### ###############################################################################", "= varx[latq,:] vary = vary[latq,:] print('MASKING LATITUDES!') ### Create 2d", "= np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight == 'no': ### Root mean square", "np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected", "coefficient for grids over vertical height (17 pressure coordinate levels)", "Weighted variable average!') print('*Completed: 
Finished calc_weightedAve function!') return meanvar ###############################################################################", "pearson correlation coefficient for grids over vertical height (17 pressure", "function!') return meanvar ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates", "np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 3: meanvar =", "height (17 pressure coordinate levels) Parameters ---------- varx : 2d", "int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf", "var : 5d,4d,3d array of a gridded variable lats :", "np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask)", ": 2d array levs : 1d array of levels lons", "= (varx - vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight ==", "w))) / np.sum(w) print('Completed: Computed weighted covariance (17 P Levels)!')", "y, w)) print('Completed: Computed weighted correlation (17 P Levels)!') return", "level pvalue[np.where(pvalue >= 0.05)] = np.nan pvalue[np.where(pvalue < 0.05)] =", "corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial correlation", "djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = 
np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf", "np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf =", "np import scipy.stats as sts ### 2-independent sample t-test stat,pvalue", "array vary : 2d array lons : 1d array of", "corr(x, y, w): \"\"\"Weighted Correlation\"\"\" wcor = cov(x, y, w)", "levels Returns ------- varx_dj : 3d array or 4d array", "[year,lat,lon] or [year,lev,lat,lon] vary_dj : 3d array [year,lat,lon] or [year,lev,lat,lon]", "lats : 1d array lons : 1d array of latitude", "correlation coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\"", "np.cos(np.deg2rad(lat2)) def m(x, w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x *", "two-tailed p-value Usage ----- stat,pvalue = calc_ttest(varx,vary) \"\"\" print('\\n>>> Using", "Function calculates statistical difference for 2 independent sample t-test Parameters", "np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask)", "return wcor corrcoef = corr(varx,vary,gw) elif weight == 'no': ###", "* (x - m(x, w)) * (y - m(y, w)))", "varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0)", "if var.ndim == 5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in", "function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates", "vary 
: 3d array Returns ------- stat = calculated t-statistic", "import numpy as np if weight == 'yes': # Computed", "m(y, w))) / np.sum(w) print('Completed: Computed weighted covariance (17 P", "weighted average for various dimensional arrays if var.ndim == 5:", "if i >= 12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:,:],", "############################################################################### ############################################################################### def calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January-February", "Parameters ---------- varx : 3d array vary : 3d array", "modules import numpy as np ### Reshape for 3d variables", "vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight == 'no': ### Root", "[year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>>", "w)) * (y - m(y, w))) / np.sum(w) print('Completed: Computed", "= 0 if i >= 12: counter = i//12 djfappendh1", "vary : 2d array lats : 1d array lons :", "lat : 1d numpy array latitudes lon : 1d numpy", "correlations Parameters ---------- varx : 2d array vary : 2d", "on latitude gw = np.cos(np.deg2rad(lat2)) def m(x, w): \"\"\"Weighted Mean\"\"\"", "np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:])", "numpy array longitudes level : string Height of variable (surface", "Create 2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) if levelq", "print('Completed: Computed weighted covariance!') return wcov def corr(x, y, w):", "Computed weighted correlation coefficient ### mask mask = 'yes' if", "2017 Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4]", 
"(2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface or profile!)!'))", "t-test Parameters ---------- varx : 3d array vary : 3d", "= np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ###", "weighted covariance (17 P Levels)!') return wcov def corr(x, y,", "############################################################################### ############################################################################### ############################################################################### def calc_weightedAve(var,lats): \"\"\" Area weights sit array", "95% confidence level pvalue[np.where(pvalue >= 0.05)] = np.nan pvalue[np.where(pvalue <", "3d array Returns ------- stat = calculated t-statistic pvalue =", "\"\"\" print('\\n>>> Using calc_ttest function!') ### Import modules import numpy", "correlation (17 P Levels)!') return wcor corrcoef = corr(varx,vary,gw) elif", "[4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq)", "Correlation coefficient from numpy function (not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed:", "def m(x, w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x * w)", "numpy as np ### Calculate weighted average for various dimensional", "\"\"\"Weighted Correlation\"\"\" wcor = cov(x, y, w) / np.sqrt(cov(x, x,", "Mean\"\"\" wave = np.sum(x * w) / np.sum(w) print('Completed: Computed", "############################################################################### ############################################################################### ############################################################################### def 
calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for", "----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorrHeight function!') ###", "/ np.sqrt(cov(x, x, w) * cov(y, y, w)) print('Completed: Computed", "weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeightLev function!') return corrcoef", "Import modules import numpy as np import scipy.stats as sts", "1. print('*Completed: Finished calc_ttest function!') return stat,pvalue ############################################################################### ############################################################################### ###############################################################################", "* w) / np.sum(w) print('Completed: Computed weighted average (17 P", "calc_weightedAve function!') ### Import modules import numpy as np ###", "= np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1,", "'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. 
,1.,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x,", "'yes': latq = np.where(lats > 40)[0] lats = lats[latq] varx", "coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>>", "\"\"\" Calculates spatial correlation from pearson correlation coefficient Parameters ----------", "& np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] =", "----- stat,pvalue = calc_ttest(varx,vary) \"\"\" print('\\n>>> Using calc_ttest function!') ###", "if level == 'surface': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel", "'surface': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12),", "latitude weight : string (yes or no) Returns ------- rmse", "for j in range(var.shape[2]): varq = var[ens,i,j,:,:] mask = np.isfinite(varq)", "= varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable", "difference for 2 independent sample t-test Parameters ---------- varx :", "in range(var.shape[1]): varq = var[i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats)", "array vary : 3d array Returns ------- stat = calculated", "weighted correlation (17 P Levels)!') return wcor corrcoef = corr(varx,vary,gw)", "############################################################################### def calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January-February Parameters", "Create 2d array of weights based on latitude gw =", "function!') return varx_dj,vary_dj ############################################################################### ############################################################################### ############################################################################### def 
calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function", "Using calc_spatialCorrHeight function!') ### Import modules import numpy as np", "Parameters ---------- varx : 4d array or 5d array [year,month,lat,lon]", "based on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq)", "[year,lev,lat,lon] Usage ----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan", "weights based on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 =", "of correlation coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels)", "coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>>", "djfappendh1 = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh =", "into [ens,year,month] Parameters ---------- var : 5d,4d,3d array of a", "np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables elif level", "= np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted", "varx_dj,vary_dj ############################################################################### ############################################################################### ############################################################################### def calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average", "Height of variable (surface or profile) levsq : integer number", "np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in", "w) * cov(y, y, w)) print('Completed: 
Computed weighted correlation (17", "varx[latq,:] vary = vary[latq,:] print('MASKING LATITUDES!') ### Create 2d meshgrid", "correlation!') else: ValueError('Wrong weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeight", ": 3d array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_dj,vary_dj =", "djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:]", "of a gridded variable lats : 2d array of latitudes", "[year,lev,lat,lon] Usage ----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan", "Computed weighted correlation!') return wcor corrcoef = corr(varx,vary,gw) elif weight", "----- rmse = calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_RMSE function!') ###", "Using calc_weightedAve function!') ### Import modules import numpy as np", "i in range(var.shape[0]): for j in range(var.shape[1]): varq = var[i,j,:,:]", "i >= 12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1", "cov(y, y, w)) print('Completed: Computed weighted correlation (17 P Levels)!')", "or profile!)!')) print('Completed: Organized data by months (ON,DJ,FM)!') print('*Completed: Finished", "djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] =", "import scipy.stats as sts ### 2-independent sample t-test stat,pvalue =", "w)) print('Completed: Computed weighted correlation!') return wcor corrcoef = corr(varx,vary,gw)", "int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12):", "meanvar = calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using calc_weightedAve function!') ### 
Import", "in function!') print('*Completed: Finished calc_SpatialCorrHeight function!') return corrcoef ############################################################################### ###############################################################################", "varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else:", "\"\"\" Function calculates statistical difference for 2 independent sample t-test", "np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted arguement in", "Import modules import numpy as np ### Calculate weighted average", "vary : 4d array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon]", ": string Height of variable (surface or profile) levsq :", "(int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0]))", "integer number of levels Returns ------- varx_djf : 3d array", "(3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for", "w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x * w) / np.sum(w)", "import numpy as np import scipy.stats as sts ### 2-independent", "function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates", "print('*Completed: Finished calc_SpatialCorrHeight function!') return corrcoef ############################################################################### 
############################################################################### ############################################################################### def", "levels Returns ------- varx_djf : 3d array or 4d array", "= calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>> Using calc_spatialCorrHeightLev function!') ### Import modules", "np.sum(w * (x - m(x, w)) * (y - m(y,", "Computed weighted average (17 P Levels)!') return wave def cov(x,", "rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted", "argument in function!') print('*Completed: Finished calc_SpatialCorrHeight function!') return corrcoef ###############################################################################", "of levels Returns ------- varx_djf : 3d array or 4d", "############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root mean square weighted average", "array or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_dj : 3d", "---------- varx : 2d array vary : 2d array lons", "mask mask = 'yes' if mask == 'yes': latq =", "meanvar : weighted average for 3d,2d,1d array Usage ----- meanvar", "print('*Completed: Finished calcDecJan function!') return varx_dj,vary_dj ############################################################################### ############################################################################### ############################################################################### def", "function!') return varx_djf,vary_djf ############################################################################### ############################################################################### ############################################################################### def calc_indttest(varx,vary): \"\"\" Function", "print('Completed: Computed NON-weighted correlation!') else: 
ValueError('Wrong weighted argument in function!')", ": 1d array of latitude weight : string (yes or", "djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))),", "= np.nansum(varmask*areamask) \\ /np.sum(areamask) elif var.ndim == 4: meanvar =", "np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'tropo': gwq", "else: print(ValueError('Selected wrong height - (surface or profile!)!')) print('Completed: Organized", "else: print(ValueError('Variable has the wrong dimensions!')) print('Completed: Weighted variable average!')", "---------- var : 5d,4d,3d array of a gridded variable lats", "range(var.shape[0]): for j in range(var.shape[1]): varq = var[i,j,:,:] mask =", "np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight == 'no': ### Root mean square error", "function!') print('*Completed: Finished calc_SpatialCorrHeight function!') return corrcoef ############################################################################### ############################################################################### ###############################################################################", "numpy as np from sklearn.metrics import mean_squared_error if weight ==", "djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] =", "return meanvar ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial", "mask == 'yes': latq = np.where(lats > 40)[0] lats =", "(17 pressure coordinate levels). 
Change the weighting for different level", "(2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong", "= np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] =", "profile!)!')) print('Completed: Organized data by months (DJF)!') print('*Completed: Finished calcDecJanFeb", "varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask) \\", "of latitudes Returns ------- meanvar : weighted average for 3d,2d,1d", "* cov(y, y, w)) print('Completed: Computed weighted correlation (17 P", "levelq == 'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq)", "np.sum(w) print('Completed: Computed weighted average (17 P Levels)!') return wave", "based on latitude gw = np.cos(np.deg2rad(lat2)) def m(x, w): \"\"\"Weighted", "Area weights sit array 5d [ens,year,month,lat,lon] into [ens,year,month] Parameters ----------", "meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in range(var.shape[0]): for i in", "- (surface or profile!)!')) print('Completed: Organized data by months (DJF)!')", "(2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables elif level ==", "np.nan pvalue[np.where(pvalue < 0.05)] = 1. 
print('*Completed: Finished calc_ttest function!')", "for i in range(var.shape[0]): varq = var[i,:,:] mask = np.isfinite(varq)", "array lats : 1d array lons : 1d array of", "/ np.sum(w) print('Completed: Computed weighted average (17 P Levels)!') return", "w) / np.sum(w) print('Completed: Computed weighted average (17 P Levels)!')", "meshgrid for weights lon2,lat2 = np.meshgrid(lons,lats) ### Create 2d array", "array of weights based on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.])", ": integer number of levels Returns ------- varx_djf : 3d", "array 5d [ens,year,month,lat,lon] into [ens,year,month] Parameters ---------- var : 5d,4d,3d", "[7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates", "corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorr function!') ### Import", "calc_SpatialCorrHeight function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\"", "== 'yes': latq = np.where(lats > 40)[0] lats = lats[latq]", "cov(x, y, w) / np.sqrt(cov(x, x, w) * cov(y, y,", "else: ValueError('Wrong weighted arguement in function!') print('*Completed: Finished calc_RMSE function!')", "= np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else:", "3d array vary : 3d array Returns ------- stat =", "profile) levsq : integer number of levels Returns ------- varx_djf", "np.meshgrid(lons,lats) ### Create 2d array of 
weights based on latitude", "over vertical height (17 pressure coordinate levels). Change the weighting", "if i >= 12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:,:],", "np.meshgrid(lons,gwq) elif levelq == 'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. ,1.,1.]) gw,gw2", "m(y, w))) / np.sum(w) print('Completed: Computed weighted covariance!') return wcov", "[5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def", "print('\\n>>> Using calc_spatialCorr function!') ### Import modules import numpy as", "array of latitude weight : string (yes or no) Returns", "np.sum(w) print('Completed: Computed weighted covariance (17 P Levels)!') return wcov", "= varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask) \\ /np.sum(areamask)", "[year,month,lev,lat,lon] vary : 4d array or 5d array [year,month,lat,lon] or", "print('Completed: Organized data by months (ON,DJ,FM)!') print('*Completed: Finished calcDecJan function!')", "lats : 2d array of latitudes Returns ------- meanvar :", "Returns ------- corrcoef : 1d array of correlation coefficient (pearson", "coordinate levels). 
Change the weighting for different level correlations Parameters", "gw,gw2 = np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\" wave =", "Computed NON-weighted correlation!') else: ValueError('Wrong weighted arguement in function!') print('*Completed:", "(3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables elif level ==", "'yes': # Computed weighted correlation coefficient ### mask mask =", "2d array vary : 2d array lats : 1d array", "----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5]", "= np.cos(np.deg2rad(lats[mask])) meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 2: meanvar", "in range(var.shape[1]): for j in range(var.shape[2]): varq = var[ens,i,j,:,:] mask", ": 3d array or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_dj", "np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask)", "for grids over vertical height (17 pressure coordinate levels) Parameters", "----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ###", "various dimensional arrays if var.ndim == 5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2]))", "w): \"\"\"Weighted Covariance\"\"\" wcov = np.sum(w * (x - m(x,", "/np.sum(areamask) elif var.ndim == 4: meanvar = np.empty((var.shape[0],var.shape[1])) for i", "[year,lev,lat,lon] vary_djf : 3d array [year,lat,lon] or [year,lev,lat,lon] Usage -----", "= calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_RMSE function!') ### Import modules", "stat = calculated t-statistic pvalue = two-tailed p-value Usage -----", "if mask == 'yes': latq = np.where(lats > 40)[0] lats", "= np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, 
(2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] =", "sts ### 2-independent sample t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant", "(varx - vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif weight == 'no':", "height (17 pressure coordinate levels). Change the weighting for different", "areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 3:", "function!') return stat,pvalue ############################################################################### ############################################################################### ############################################################################### def calc_weightedAve(var,lats): \"\"\" Area", "corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted argument", "var.ndim == 3: meanvar = np.empty((var.shape[0])) for i in range(var.shape[0]):", "arrays if var.ndim == 5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens", "np ### Calculate weighted average for various dimensional arrays if", "------- corrcoef : 1d array of correlation coefficient (pearson r)", "(3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong", "lons : 1d array of latitude weight : string (yes", "array of levels lons : 1d array of latitude weight", "2d array of weights based on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5,", "P Levels)!') return wcor corrcoef = corr(varx,vary,gw) elif weight ==", "weighting for different level correlations Parameters ---------- varx : 2d", "varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ### Import", "5d array 
[year,month,lat,lon] or [year,month,lev,lat,lon] lat : 1d numpy array", "varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0)", "djappendh = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))),", "= np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf,", "----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ###", "print('Completed: Computed weighted correlation!') return wcor corrcoef = corr(varx,vary,gw) elif", "vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height -", "for December-January Parameters ---------- varx : 4d array or 5d", "varx_djf,vary_djf ############################################################################### ############################################################################### ############################################################################### def calc_indttest(varx,vary): \"\"\" Function calculates statistical", "varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ### Import", "Change the weighting for different level correlations Parameters ---------- varx", "from sklearn.metrics import mean_squared_error if weight == 'yes': # Computed", "/ np.sum(w) print('Completed: Computed weighted covariance (17 P Levels)!') return", "corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else: 
ValueError('Wrong weighted arguement", "i >= 12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:])", "### Significant at 95% confidence level pvalue[np.where(pvalue >= 0.05)] =", "print('Completed: Organized data by months (DJF)!') print('*Completed: Finished calcDecJanFeb function!')", "average!') print('*Completed: Finished calc_weightedAve function!') return meanvar ############################################################################### ############################################################################### ###############################################################################", "12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 =", "string (all, tropo, strato) Returns ------- corrcoef : 1d array", ": 1d numpy array longitudes level : string Height of", "i in range(var.shape[0]): varq = var[i,:,:] mask = np.isfinite(varq) &", "== 'yes': # Computed weighted correlation coefficient ### Create 2d", "if i >= 12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:])", "= calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorrHeight function!') ### Import modules", "NON-weighted correlation!') else: ValueError('Wrong weighted argument in function!') print('*Completed: Finished", "weighted average (17 P Levels)!') return wave def cov(x, y,", "strato) Returns ------- corrcoef : 1d array of correlation coefficient", "for i in range(0,varxravel.shape[0]-12,12): counter = 0 if i >=", "= np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 3: meanvar", "############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial correlation from pearson", "np.empty((var.shape[0])) for i in 
range(var.shape[0]): varq = var[i,:,:] mask =", "levs : 1d array of levels lons : 1d array", "axis=0) ### Reshape for 4d variables elif level == 'profile':", "varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0])))", "def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial correlation from pearson correlation coefficient", "P Levels)!') return wcov def corr(x, y, w): \"\"\"Weighted Correlation\"\"\"", "gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq ==", "= np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has the wrong", "array of weights based on latitude gw = np.cos(np.deg2rad(lat2)) def", "varx : 3d array vary : 3d array Returns -------", "meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) ### Create 2d array", "varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0)", "gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq ==", "at 95% confidence level pvalue[np.where(pvalue >= 0.05)] = np.nan pvalue[np.where(pvalue", "int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12):", "rmse : 1d array Usage ----- rmse = calc_RMSE(varx,vary,lats,lons) \"\"\"", "Using calc_spatialCorrHeightLev function!') ### Import modules import numpy as np", "correlation!') else: ValueError('Wrong weighted arguement in function!') print('*Completed: Finished calc_RMSE", "= np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = 
np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0)", "\"\"\" print('\\n>>> Using calc_spatialCorr function!') ### Import modules import numpy", "latitude gw = np.cos(np.deg2rad(lat2)) ### Calculate rmse sq_err = (varx", "- m(y, w))) / np.sum(w) print('Completed: Computed weighted covariance (17", "(y - m(y, w))) / np.sum(w) print('Completed: Computed weighted covariance!')", "range(0,varxravel.shape[0]-12,12): counter = 0 if i >= 12: counter =", "print('Completed: Computed weighted covariance (17 P Levels)!') return wcov def", "for 4d variables elif level == 'profile': varxravel = np.reshape(varx.copy(),", "print('Completed: Computed weighted correlation (17 P Levels)!') return wcor corrcoef", "i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:])", "np.sum(x * w) / np.sum(w) print('Completed: Computed weighted average (17", "w))) / np.sum(w) print('Completed: Computed weighted covariance!') return wcov def", "Finished calcDecJanFeb function!') return varx_djf,vary_djf ############################################################################### ############################################################################### ############################################################################### def calc_indttest(varx,vary):", "### Import modules import numpy as np ### Reshape for", "2-independent sample t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at 95%", "Using calc_RMSE function!') ### Import modules import numpy as np", "np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape", 
"############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial correlation from pearson correlation", "levels lons : 1d array of latitude weight : string", "djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf =", ": string (all, tropo, strato) Returns ------- corrcoef : 1d", "### Create 2d array of weights based on latitude gwq", "2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) ### Create 2d", "calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>> Using calc_spatialCorrHeightLev function!') ### Import modules import", "Finished calc_ttest function!') return stat,pvalue ############################################################################### ############################################################################### ############################################################################### def calc_weightedAve(var,lats):", "2d array of latitudes Returns ------- meanvar : weighted average", "1d array of correlation coefficient (pearson r) Usage ----- corrcoef", "of levels lons : 1d array of latitude weight :", "varx : 4d array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon]", ": 2d array lons : 1d array of latitude weight", "Usage ----- corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorr function!')", "np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12): counter = 0 if", "variables elif level == 'profile': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0])))", "\"\"\"Weighted Mean\"\"\" wave = np.sum(x * w) / np.sum(w) print('Completed:", "return varx_dj,vary_dj ############################################################################### 
############################################################################### ############################################################################### def calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates", "average Parameters ---------- varx : 2d array vary : 2d", "December-January Parameters ---------- varx : 4d array or 5d array", "or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] lat : 1d numpy", "def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial correlation from pearson correlation coefficient", "= np.where(lats > 40)[0] lats = lats[latq] varx = varx[latq,:]", "elif var.ndim == 2: meanvar = np.empty((var.shape[0])) varq = var[:,:]", "Computed weighted covariance!') return wcov def corr(x, y, w): \"\"\"Weighted", "= np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj =", "weights sit array 5d [ens,year,month,lat,lon] into [ens,year,month] Parameters ---------- var", "varx : 2d array vary : 2d array lons :", "level correlations Parameters ---------- varx : 2d array vary :", "############################################################################### ############################################################################### def calc_weightedAve(var,lats): \"\"\" Area weights sit array 5d", "Using calcDecJan function!') ### Import modules import numpy as np", "var.ndim == 2: meanvar = np.empty((var.shape[0])) varq = var[:,:] mask", "np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape", "data by months (ON,DJ,FM)!') print('*Completed: Finished calcDecJan function!') return varx_dj,vary_dj", "np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = 
np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\"", "print('*Completed: Finished calc_ttest function!') return stat,pvalue ############################################################################### ############################################################################### ############################################################################### def", ": 1d array lons : 1d array of latitude weight", "or profile) levsq : integer number of levels Returns -------", "mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask =", "4d array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] vary :", "np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x *", "= i//12 djfappendh1 = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:])", "j in range(var.shape[2]): varq = var[ens,i,j,:,:] mask = np.isfinite(varq) &", ">= 12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 =", "range(var.shape[1]): varq = var[i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask", "int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height", "== 'profile': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(),", "= np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables elif", "elif var.ndim == 4: meanvar = np.empty((var.shape[0],var.shape[1])) for i in", "range(var.shape[1]): for j in range(var.shape[2]): varq = var[ens,i,j,:,:] mask =", "* (y - m(y, w))) / np.sum(w) print('Completed: Computed weighted", "sample t-test Parameters ---------- varx : 3d array vary :", "weights lon2,lev2 = np.meshgrid(lons,levs) if levelq == 'all': 
### Create", "def cov(x, y, w): \"\"\"Weighted Covariance\"\"\" wcov = np.sum(w *", "= np.meshgrid(lons,levs) ### Create 2d array of weights based on", "varx = varx[latq,:] vary = vary[latq,:] print('MASKING LATITUDES!') ### Create", "3d array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_dj,vary_dj = calcDecJan(varx,vary,lat,lon,level,levsq)", "in function!') print('*Completed: Finished calc_SpatialCorr function!') return corrcoef ############################################################################### ###############################################################################", "array [year,month,lat,lon] or [year,month,lev,lat,lon] vary : 4d array or 5d", "np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) )", "rmse ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial correlation", "weighted arguement in function!') print('*Completed: Finished calc_RMSE function!') return rmse", "elif var.ndim == 3: meanvar = np.empty((var.shape[0])) for i in", "= np.nan pvalue[np.where(pvalue < 0.05)] = 1. 
print('*Completed: Finished calc_ttest", "array of latitudes Returns ------- meanvar : weighted average for", "weight : string (yes or no) levelq : string (all,", "modules import numpy as np from sklearn.metrics import mean_squared_error if", "function!') ### Import modules import numpy as np if weight", "(yes or no) Returns ------- corrcoef : 1d array of", "x, w) * cov(y, y, w)) print('Completed: Computed weighted correlation", "= np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12):", ": 2d array vary : 2d array lats : 1d", "= np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0)", "np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface or", "calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January Parameters ---------- varx", "calc_weightedAve function!') return meanvar ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\"", "string (yes or no) Returns ------- corrcoef : 1d array", "np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted arguement in", "Computed NON-weighted correlation!') else: ValueError('Wrong weighted argument in function!') print('*Completed:", "average for December-January-February Parameters ---------- varx : 4d array or", "vary_dj : 3d array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_dj,vary_dj", "'yes': # 
Computed weighted correlation coefficient ### Create 2d meshgrid", "array vary : 2d array levs : 1d array of", "varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0)", "### 2-independent sample t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at", "vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12): counter =", ": 2d array lats : 1d array lons : 1d", "return rmse ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial", "4: meanvar = np.empty((var.shape[0],var.shape[1])) for i in range(var.shape[0]): for j", "varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:]", "# Computed weighted correlation coefficient ### mask mask = 'yes'", "weight == 'no': ### Correlation coefficient from numpy function (not", "w) / np.sum(w) print('Completed: Computed weighted average!') return wave def", "== 'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. 
,1.,1.]) gw,gw2 = np.meshgrid(lons,gwq) def", "if levelq == 'all': ### Create 2d array of weights", ",1.,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w): \"\"\"Weighted Mean\"\"\" wave", "np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has the wrong dimensions!'))", "### Calculate rmse sq_err = (varx - vary)**2 rmse =", "np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) )", "of variable (surface or profile) levsq : integer number of", "independent sample t-test Parameters ---------- varx : 3d array vary", "array [year,lat,lon] or [year,lev,lat,lon] vary_dj : 3d array [year,lat,lon] or", "> 40)[0] lats = lats[latq] varx = varx[latq,:] vary =", "else: ValueError('Wrong weighted argument in function!') print('*Completed: Finished calc_SpatialCorrHeightLev function!')", "calculates average for December-January Parameters ---------- varx : 4d array", "a gridded variable lats : 2d array of latitudes Returns", "= np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ###", "np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:])", "elif levelq == 'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. 
,1.,1.]) gw,gw2 =", "pressure coordinate levels) Parameters ---------- varx : 2d array vary", "= sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at 95% confidence level pvalue[np.where(pvalue >=", "import numpy as np from sklearn.metrics import mean_squared_error if weight", "sklearn.metrics import mean_squared_error if weight == 'yes': # Computed weighted", "(pearson r) Usage ----- corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using", "mean square error from sklearn (not weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel()))", "Calculate weighted average for various dimensional arrays if var.ndim ==", ": <NAME> Date : 13 August 2017 Usage ----- [1]", "t-statistic pvalue = two-tailed p-value Usage ----- stat,pvalue = calc_ttest(varx,vary)", "P Levels)!') return wave def cov(x, y, w): \"\"\"Weighted Covariance\"\"\"", "or no) levelq : string (all, tropo, strato) Returns -------", "Finished calc_SpatialCorr function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight):", "Usage ----- varx_djf,vary_djf = calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!')", "the weighting for different level correlations Parameters ---------- varx :", "= np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1,", "print('*Completed: Finished calc_RMSE function!') return rmse ############################################################################### ############################################################################### ############################################################################### def", "function!') ### Import modules import 
numpy as np ### Calculate", "np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected", "scipy.stats as sts ### 2-independent sample t-test stat,pvalue = sts.ttest_ind(varx,vary,nan_policy='omit')", "[ens,year,month] Parameters ---------- var : 5d,4d,3d array of a gridded", "calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function", "sts.ttest_ind(varx,vary,nan_policy='omit') ### Significant at 95% confidence level pvalue[np.where(pvalue >= 0.05)]", "weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted", "vertical height (17 pressure coordinate levels). 
Change the weighting for", "np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in range(var.shape[0]): for i in range(var.shape[1]): for", "Organized data by months (ON,DJ,FM)!') print('*Completed: Finished calcDecJan function!') return", ") for i in range(0,varxravel.shape[0]-12,12): counter = 0 if i", "(pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using", "counter = 0 if i >= 12: counter = i//12", "i in range(0,varxravel.shape[0]-12,12): counter = 0 if i >= 12:", "Import modules import numpy as np ### Reshape for 3d", "of correlation coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorr(varx,vary,lats,lons)", "[year,lev,lat,lon] vary_dj : 3d array [year,lat,lon] or [year,lev,lat,lon] Usage -----", "np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j]", "correlation!') return wcor corrcoef = corr(varx,vary,gw) elif weight == 'no':", "in range(var.shape[2]): varq = var[ens,i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats)", "i//12 djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:]", "np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))),", "cov(x, y, w): \"\"\"Weighted Covariance\"\"\" wcov = np.sum(w * (x", "= calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ### Import modules", "calcDecJanFeb(varx,vary,lat,lon,level,levsq) \"\"\" print('\\n>>> Using calcDecJan function!') ### Import modules import", "on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) def", "'profile': varxravel = np.reshape(varx.copy(), 
(int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq,", "return wcov def corr(x, y, w): \"\"\"Weighted Correlation\"\"\" wcor =", "useful untilities for SITperturb experiments Notes ----- Author : <NAME>", "'all': ### Create 2d array of weights based on latitude", "lon2,lev2 = np.meshgrid(lons,levs) if levelq == 'all': ### Create 2d", "weight : string (yes or no) Returns ------- corrcoef :", "------- stat = calculated t-statistic pvalue = two-tailed p-value Usage", "var[i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask", "12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:])", "(surface or profile!)!')) print('Completed: Organized data by months (ON,DJ,FM)!') print('*Completed:", "y, w) / np.sqrt(cov(x, x, w) * cov(y, y, w))", "4d array [year,lat,lon] or [year,lev,lat,lon] vary_djf : 3d array [year,lat,lon]", "'no': ### Correlation coefficient from numpy function (not weighted) corrcoef=", "x, w) * cov(y, y, w)) print('Completed: Computed weighted correlation!')", "\"\"\"Weighted Covariance\"\"\" wcov = np.sum(w * (x - m(x, w))", "== 'yes': # Computed weighted correlation coefficient ### mask mask", "############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial correlation from", "0 if i >= 12: counter = i//12 djfappendh1 =", "corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>> Using calc_spatialCorrHeightLev function!') ### Import", "arguement in function!') print('*Completed: Finished calc_SpatialCorr function!') return corrcoef 
###############################################################################", "(17 P Levels)!') return wcor corrcoef = corr(varx,vary,gw) elif weight", "Using calc_spatialCorr function!') ### Import modules import numpy as np", "ValueError('Wrong weighted arguement in function!') print('*Completed: Finished calc_SpatialCorr function!') return", "(17 P Levels)!') return wcov def corr(x, y, w): \"\"\"Weighted", "of weights based on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2", "4d variables elif level == 'profile': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq,", "calc_SpatialCorr function!') return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\"", "= np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0])", "== 'no': ### Correlation coefficient from numpy function (not weighted)", "(int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))", "calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial correlation from pearson correlation coefficient for", "square weighted average Parameters ---------- varx : 2d array vary", "### Import modules import numpy as np from sklearn.metrics import", "confidence level pvalue[np.where(pvalue >= 0.05)] = np.nan pvalue[np.where(pvalue < 0.05)]", "= 'yes' if mask == 'yes': latq = np.where(lats >", "for 3d,2d,1d array Usage ----- meanvar 
= calc_weightedAve(var,lats) \"\"\" print('\\n>>>", "= np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf", "areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask) \\ /np.sum(areamask) elif var.ndim", "string (yes or no) Returns ------- rmse : 1d array", "corrcoef : 1d array of correlation coefficient (pearson r) Usage", "Import modules import numpy as np from sklearn.metrics import mean_squared_error", "def calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January-February Parameters ----------", "coefficient from numpy function (not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed", "or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] vary : 4d array", "vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables", "range(var.shape[0]): varq = var[i,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask", "0.05)] = np.nan pvalue[np.where(pvalue < 0.05)] = 1. 
print('*Completed: Finished", "= np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables elif", "coefficient Parameters ---------- varx : 2d array vary : 2d", "= np.sum(x * w) / np.sum(w) print('Completed: Computed weighted average!')", "############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial correlation from pearson correlation", "by months (DJF)!') print('*Completed: Finished calcDecJanFeb function!') return varx_djf,vary_djf ###############################################################################", "areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 2:", "modules import numpy as np import scipy.stats as sts ###", "return wave def cov(x, y, w): \"\"\"Weighted Covariance\"\"\" wcov =", "gridded variable lats : 2d array of latitudes Returns -------", "############################################################################### ############################################################################### ############################################################################### def calc_spatialCorr(varx,vary,lats,lons,weight): \"\"\" Calculates spatial correlation from", "correlation coefficient ### Create 2d meshgrid for weights lon2,lev2 =", "(int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq,", "varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask) \\ /np.sum(areamask) elif", "correlation coefficient ### mask mask = 'yes' if mask ==", "vary_dj = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12): counter", "\"\"\" print('\\n>>> Using 
calc_weightedAve function!') ### Import modules import numpy", "2d array vary : 2d array levs : 1d array", "- m(x, w)) * (y - m(y, w))) / np.sum(w)", "= np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height - (surface", "correlation coefficient Parameters ---------- varx : 2d array vary :", "5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in range(var.shape[0]): for i", "calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial correlation from pearson correlation coefficient for", "wcor = cov(x, y, w) / np.sqrt(cov(x, x, w) *", "= np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'strato':", "2: meanvar = np.empty((var.shape[0])) varq = var[:,:] mask = np.isfinite(varq)", "Computed weighted correlation coefficient ### Create 2d meshgrid for weights", "calcDecJanFeb function!') return varx_djf,vary_djf ############################################################################### ############################################################################### ############################################################################### def calc_indttest(varx,vary): \"\"\"", "############################################################################### ############################################################################### def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root mean square weighted", "calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorrHeight function!') ### Import modules import", "= np.empty((var.shape[0],var.shape[1])) for i in range(var.shape[0]): for j in range(var.shape[1]):", "np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_dj =", "meanvar = 
np.empty((var.shape[0])) varq = var[:,:] mask = np.isfinite(varq) &", "from numpy function (not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted", "data by months (DJF)!') print('*Completed: Finished calcDecJanFeb function!') return varx_djf,vary_djf", "of correlation coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons)", "calc_spatialCorr function!') ### Import modules import numpy as np if", "calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\"", "12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf =", "\"\"\" print('\\n>>> Using calc_spatialCorrHeight function!') ### Import modules import numpy", "stat,pvalue = calc_ttest(varx,vary) \"\"\" print('\\n>>> Using calc_ttest function!') ### Import", "corrcoef = corr(varx,vary,gw) elif weight == 'no': ### Correlation coefficient", "for December-January-February Parameters ---------- varx : 4d array or 5d", "= np.sum(w * (x - m(x, w)) * (y -", "= np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf", "= np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask]))", "Create 2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) ### Create", "r) Usage ----- corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorr", "spatial correlation from pearson correlation coefficient Parameters ---------- varx :", "Returns ------- varx_djf : 3d array or 4d array [year,lat,lon]", "or [year,month,lev,lat,lon] vary : 4d array or 5d array [year,month,lat,lon]", "np if weight == 
'yes': # Computed weighted correlation coefficient", "2d meshgrid for weights lon2,lev2 = np.meshgrid(lons,levs) if levelq ==", ">= 12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1", "Returns ------- stat = calculated t-statistic pvalue = two-tailed p-value", "for grids over vertical height (17 pressure coordinate levels). Change", "5d [ens,year,month,lat,lon] into [ens,year,month] Parameters ---------- var : 5d,4d,3d array", "== 2: meanvar = np.empty((var.shape[0])) varq = var[:,:] mask =", "------- varx_djf : 3d array or 4d array [year,lat,lon] or", "= var[:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask]", "calcDecJan function!') ### Import modules import numpy as np ###", "var[i,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask", "############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates spatial correlation from pearson correlation", "of weights based on latitude gw = np.cos(np.deg2rad(lat2)) def m(x,", "############################################################################### def calc_indttest(varx,vary): \"\"\" Function calculates statistical difference for 2", "for 2 independent sample t-test Parameters ---------- varx : 3d", "counter = i//12 djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:],", "= np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0])", "calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January", "(DJF)!') print('*Completed: Finished calcDecJanFeb function!') return varx_djf,vary_djf 
############################################################################### ############################################################################### ###############################################################################", "calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using calc_weightedAve function!') ### Import modules import", "3d array or 4d array [year,lat,lon] or [year,lev,lat,lon] vary_djf :", "def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial correlation from pearson correlation coefficient", "Organized data by months (DJF)!') print('*Completed: Finished calcDecJanFeb function!') return", "of latitude weight : string (yes or no) Returns -------", "= np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 3: meanvar = np.empty((var.shape[0])) for", "variable lats : 2d array of latitudes Returns ------- meanvar", "def calc_RMSE(varx,vary,lats,lons,weight): \"\"\" Calculates root mean square weighted average Parameters", "vary = vary[latq,:] print('MASKING LATITUDES!') ### Create 2d meshgrid for", "== 'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq) elif", "np.meshgrid(lons,levs) if levelq == 'all': ### Create 2d array of", "### Import modules import numpy as np import scipy.stats as", "varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0])))", "def calc_weightedAve(var,lats): \"\"\" Area weights sit array 5d [ens,year,month,lat,lon] into", "numpy function (not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!')", "function!') ### Import modules import numpy as np import scipy.stats", "meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has the wrong dimensions!')) 
print('Completed:", "y, w)) print('Completed: Computed weighted correlation!') return wcor corrcoef =", "of latitude weight : string (yes or no) levelq :", "= np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted arguement", "return corrcoef ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial", "i//12 djfappendh1 = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh", "w) / np.sqrt(cov(x, x, w) * cov(y, y, w)) print('Completed:", "print('Completed: Computed weighted average (17 P Levels)!') return wave def", "\"\"\" Calculates spatial correlation from pearson correlation coefficient for grids", "np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:])", "based on latitude gw = np.cos(np.deg2rad(lat2)) ### Calculate rmse sq_err", "### Correlation coefficient from numpy function (not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1]", "lats = lats[latq] varx = varx[latq,:] vary = vary[latq,:] print('MASKING", "for 3d variables if level == 'surface': varxravel = np.reshape(varx.copy(),", "= np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf,", "############################################################################### def calc_weightedAve(var,lats): \"\"\" Area weights sit array 5d [ens,year,month,lat,lon]", "August 2017 
Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary)", "### Calculate weighted average for various dimensional arrays if var.ndim", "== 'no': ### Root mean square error from sklearn (not", ": 1d numpy array latitudes lon : 1d numpy array", "elif weight == 'no': ### Correlation coefficient from numpy function", "of weights based on latitude gw = np.cos(np.deg2rad(lat2)) ### Calculate", "----- corrcoef = calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorr function!') ###", "----- meanvar = calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using calc_weightedAve function!') ###", "np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'strato': gwq", "np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]),", "as np import scipy.stats as sts ### 2-independent sample t-test", "gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w):", "= np.meshgrid(lons,gwq) elif levelq == 'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.])", "= 1. 
print('*Completed: Finished calc_ttest function!') return stat,pvalue ############################################################################### ###############################################################################", "calc_RMSE function!') ### Import modules import numpy as np from", "= np.cos(np.deg2rad(lats[mask])) meanvar[ens,i,j] = np.nansum(varmask*areamask) \\ /np.sum(areamask) elif var.ndim ==", "weighted covariance!') return wcov def corr(x, y, w): \"\"\"Weighted Correlation\"\"\"", "calc_spatialCorr(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorr function!') ### Import modules import", "calc_spatialCorr(varx,vary,lats,lons,weight) [6] calc_RMSE(varx,vary,lats,lons,weight) [7] calc_spatialCorrHeight(varx,vary,lats,lons,weight) [8] calc_spatialCorrHeightLev(varx,vary,lats,lons,weight,levelq) \"\"\" def calcDecJan(varx,vary,lat,lon,level,levsq):", "== 3: meanvar = np.empty((var.shape[0])) for i in range(var.shape[0]): varq", "= np.meshgrid(lons,lats) ### Create 2d array of weights based on", "4d array [year,lat,lon] or [year,lev,lat,lon] vary_dj : 3d array [year,lat,lon]", "Using calc_ttest function!') ### Import modules import numpy as np", "weight : string (yes or no) Returns ------- rmse :", "r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>> Using calc_spatialCorrHeightLev", "[year,month,lat,lon] or [year,month,lev,lat,lon] lat : 1d numpy array latitudes lon", "calc_ttest(varx,vary) \"\"\" print('\\n>>> Using calc_ttest function!') ### Import modules import", "<gh_stars>1-10 \"\"\" Functions are useful untilities for SITperturb experiments Notes", "np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) ### Reshape for 4d variables elif level", "varq = var[i,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask =", "varx : 2d array vary : 2d array levs :", "levelq == 'strato': gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. 
,1.,1.]) gw,gw2 = np.meshgrid(lons,gwq)", "weight == 'yes': # Computed weighted correlation coefficient ### mask", "1d array lons : 1d array of latitude weight :", "= two-tailed p-value Usage ----- stat,pvalue = calc_ttest(varx,vary) \"\"\" print('\\n>>>", "np.nansum(varmask*areamask) \\ /np.sum(areamask) elif var.ndim == 4: meanvar = np.empty((var.shape[0],var.shape[1]))", "############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeightLev(varx,vary,levs,lons,weight,levelq): \"\"\" Calculates spatial correlation from", "meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 3: meanvar = np.empty((var.shape[0]))", "& np.isfinite(lats) varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar =", "0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) elif levelq == 'tropo': gwq =", "gw = np.cos(np.deg2rad(lat2)) def m(x, w): \"\"\"Weighted Mean\"\"\" wave =", "vary : 2d array lons : 1d array of latitude", "<NAME> Date : 13 August 2017 Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq)", "2d array vary : 2d array lons : 1d array", "Parameters ---------- var : 5d,4d,3d array of a gridded variable", "i >= 12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf", "var[ens,i,j,:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask = varq[mask] areamask", "for SITperturb experiments Notes ----- Author : <NAME> Date :", "= np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in range(var.shape[0]): for i in range(var.shape[1]):", "as np ### Reshape for 3d variables if level ==", "numpy as np import scipy.stats as sts ### 2-independent sample", "average for 3d,2d,1d array Usage ----- meanvar = calc_weightedAve(var,lats) \"\"\"", "counter = i//12 djfappendh1 = 
np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:],", "\"\"\" print('\\n>>> Using calc_spatialCorrHeightLev function!') ### Import modules import numpy", "[1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats) [5] calc_spatialCorr(varx,vary,lats,lons,weight)", "varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0)", "= np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh,", "calc_ttest function!') ### Import modules import numpy as np import", "average!') return wave def cov(x, y, w): \"\"\"Weighted Covariance\"\"\" wcov", "Reshape for 3d variables if level == 'surface': varxravel =", "(17 pressure coordinate levels) Parameters ---------- varx : 2d array", "vary[latq,:] print('MASKING LATITUDES!') ### Create 2d meshgrid for weights lon2,lat2", "np.empty((var.shape[0])) varq = var[:,:] mask = np.isfinite(varq) & np.isfinite(lats) varmask", "Returns ------- varx_dj : 3d array or 4d array [year,lat,lon]", "different level correlations Parameters ---------- varx : 2d array vary", "lon : 1d numpy array longitudes level : string Height", "statistical difference for 2 independent sample t-test Parameters ---------- varx", "array longitudes level : string Height of variable (surface or", "np.sum(x * w) / np.sum(w) print('Completed: Computed weighted average!') return", "weighted correlation coefficient ### mask mask = 'yes' if mask", "/ np.sum(w) print('Completed: Computed weighted covariance!') return wcov def corr(x,", "'tropo': gwq = np.array([1.0,1.0,1.0,1.0,0.5,0.5,0.5,0.2,0.2,0.,0.,0., 0.,0.,0.,0.,0.]) gw,gw2 = np.meshgrid(lons,gwq) elif 
levelq", "= corr(varx,vary,gw) elif weight == 'no': ### Correlation coefficient from", "latitude weight : string (yes or no) Returns ------- corrcoef", "latitude weight : string (yes or no) levelq : string", "[year,lat,lon] or [year,lev,lat,lon] vary_djf : 3d array [year,lat,lon] or [year,lev,lat,lon]", "varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,int(lat.shape[0]),int(lon.shape[0]))), axis=0)", ": string (yes or no) levelq : string (all, tropo,", "np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_dj = np.empty((int(vary.shape[0]-1),levsq,", "varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has", "correlation from pearson correlation coefficient for grids over vertical height", "Levels)!') return wcov def corr(x, y, w): \"\"\"Weighted Correlation\"\"\" wcor", "= np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj", ": integer number of levels Returns ------- varx_dj : 3d", "varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i,j] = np.nansum(varmask*areamask)/np.sum(areamask) elif", "SITperturb experiments Notes ----- Author : <NAME> Date : 13", "from sklearn (not weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted", "for j in range(var.shape[1]): varq = var[i,j,:,:] mask = np.isfinite(varq)", "varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) 
vary_djf", "== 'all': ### Create 2d array of weights based on", "gwq = np.array([0.,0.,0.,0.,0.,0.,0.,0.,0.,0.,0.5,1.,1.,1.,1. ,1.,1.]) gw,gw2 = np.meshgrid(lons,gwq) def m(x, w):", "---------- varx : 3d array vary : 3d array Returns", "function (not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else:", "int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj", "### Reshape for 4d variables elif level == 'profile': varxravel", "for i in range(var.shape[1]): for j in range(var.shape[2]): varq =", "(int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0]))", "correlation coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\"", "\"\"\" Function calculates average for December-January Parameters ---------- varx :", "= np.empty((var.shape[0])) for i in range(var.shape[0]): varq = var[i,:,:] mask", "np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12): counter", "the wrong dimensions!')) print('Completed: Weighted variable average!') print('*Completed: Finished calc_weightedAve", "---------- varx : 4d array or 5d array [year,month,lat,lon] or", "for ens in range(var.shape[0]): for i in range(var.shape[1]): for j", "(y - m(y, w))) / np.sum(w) print('Completed: Computed weighted covariance", "var.ndim == 4: meanvar = np.empty((var.shape[0],var.shape[1])) for i in range(var.shape[0]):", "correlation coefficient (pearson r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\"", "j in range(var.shape[1]): varq = 
var[i,j,:,:] mask = np.isfinite(varq) &", "np.cos(np.deg2rad(lat2)) ### Calculate rmse sq_err = (varx - vary)**2 rmse", "varmask = varq[mask] areamask = np.cos(np.deg2rad(lats[mask])) meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif", "function!') return rmse ############################################################################### ############################################################################### ############################################################################### def calc_spatialCorrHeight(varx,vary,levs,lons,weight): \"\"\" Calculates", "Significant at 95% confidence level pvalue[np.where(pvalue >= 0.05)] = np.nan", "np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i in range(0,varxravel.shape[0]-12,12): counter", "untilities for SITperturb experiments Notes ----- Author : <NAME> Date", "----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons,levels) \"\"\" print('\\n>>> Using calc_spatialCorrHeightLev function!') ###", "meanvar[i] = np.nansum(varmask*areamask)/np.sum(areamask) elif var.ndim == 2: meanvar = np.empty((var.shape[0]))", "= i//12 djappendh = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] =", "12: counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:])", "np.reshape(varx.copy(), (int(varx.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12), int(lat.shape[0]),int(lon.shape[0]))) varx_djf =", "Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq) [3] calc_indttest(varx,vary) [4] calc_weightedAve(var,lats)", "(yes or no) levelq : string (all, tropo, strato) Returns", "Finished calcDecJan function!') return varx_dj,vary_dj 
############################################################################### ############################################################################### ############################################################################### def calcDecJanFeb(varx,vary,lat,lon,level,levsq):", "weight == 'yes': # Computed weighted correlation coefficient ### Create", "def calcDecJan(varx,vary,lat,lon,level,levsq): \"\"\" Function calculates average for December-January Parameters ----------", ": 3d array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_djf,vary_djf =", "square error from sklearn (not weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed:", "rmse sq_err = (varx - vary)**2 rmse = np.sqrt((np.sum(sq_err*gw))/np.sum(gw)) elif", "int(lat.shape[0]),int(lon.shape[0]))) varx_djf = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_djf = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for i", "on latitude gwq = np.array([0.25,0.25,0.25,0.25,0.25,0.25,0.4,0.5,0.5,0.5, 0.5,0.5,0.5,0.7,0.7,0.7,1.]) gw,gw2 = np.meshgrid(lons,gwq) elif", "cov(y, y, w)) print('Completed: Computed weighted correlation!') return wcor corrcoef", "array latitudes lon : 1d numpy array longitudes level :", "pvalue[np.where(pvalue < 0.05)] = 1. 
print('*Completed: Finished calc_ttest function!') return", "Create 2d array of weights based on latitude gwq =", "1d numpy array latitudes lon : 1d numpy array longitudes", "array of a gridded variable lats : 2d array of", "varxravel[12+i,:,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) djfappendh = np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf", "varx_djf = np.empty((int(varx.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0]))) vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for", "\"\"\" Functions are useful untilities for SITperturb experiments Notes -----", "var.ndim == 5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in range(var.shape[0]):", "Date : 13 August 2017 Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2]", "print('\\n>>> Using calcDecJan function!') ### Import modules import numpy as", "elif weight == 'no': ### Root mean square error from", "\"\"\" Calculates root mean square weighted average Parameters ---------- varx", "vary_djf : 3d array [year,lat,lon] or [year,lev,lat,lon] Usage ----- varx_djf,vary_djf", "print('MASKING LATITUDES!') ### Create 2d meshgrid for weights lon2,lat2 =", "as np ### Calculate weighted average for various dimensional arrays", "------- rmse : 1d array Usage ----- rmse = calc_RMSE(varx,vary,lats,lons)", "= calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using calc_weightedAve function!') ### Import modules", "print('\\n>>> Using calc_RMSE function!') ### Import modules import numpy as", "np.append(djfappendh1, varxravel[13+i,:,:,:]) djfappendf = np.append(djfappendf1, varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]),", "array [year,month,lat,lon] or [year,month,lev,lat,lon] lat : 1d numpy array latitudes", "4d array or 5d array [year,month,lat,lon] or [year,month,lev,lat,lon] lat :", "i//12 djappendh = 
np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djappendf = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh,", "np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh = np.append(djfappendh1,varxravel[13+i,:,:]) djfappendf = np.append(djfappendf1,varyravel[13+i,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh,", "levels). Change the weighting for different level correlations Parameters ----------", "mean square weighted average Parameters ---------- varx : 2d array", "np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel = np.reshape(vary.copy(), (int(vary.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varx_dj =", "print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong weighted arguement in function!')", "### Create 2d array of weights based on latitude gw", "djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_dj[counter,:,:]", "level : string Height of variable (surface or profile) levsq", "= i//12 djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:])", ">= 0.05)] = np.nan pvalue[np.where(pvalue < 0.05)] = 1. 
print('*Completed:", "latq = np.where(lats > 40)[0] lats = lats[latq] varx =", "coordinate levels) Parameters ---------- varx : 2d array vary :", "lon2,lev2 = np.meshgrid(lons,levs) ### Create 2d array of weights based", "calcDecJan function!') return varx_dj,vary_dj ############################################################################### ############################################################################### ############################################################################### def calcDecJanFeb(varx,vary,lat,lon,level,levsq): \"\"\"", "wave = np.sum(x * w) / np.sum(w) print('Completed: Computed weighted", "Returns ------- meanvar : weighted average for 3d,2d,1d array Usage", ": 1d array Usage ----- rmse = calc_RMSE(varx,vary,lats,lons) \"\"\" print('\\n>>>", "levsq : integer number of levels Returns ------- varx_dj :", "= np.sum(x * w) / np.sum(w) print('Completed: Computed weighted average", "i >= 12: counter = i//12 djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:])", "for weights lon2,lev2 = np.meshgrid(lons,levs) ### Create 2d array of", "array vary : 2d array lats : 1d array lons", "* cov(y, y, w)) print('Completed: Computed weighted correlation!') return wcor", "pvalue = two-tailed p-value Usage ----- stat,pvalue = calc_ttest(varx,vary) \"\"\"", "for different level correlations Parameters ---------- varx : 2d array", "calc_spatialCorrHeight function!') ### Import modules import numpy as np if", "levsq : integer number of levels Returns ------- varx_djf :", "= np.cos(np.deg2rad(lat2)) def m(x, w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x", "levelq : string (all, tropo, strato) Returns ------- corrcoef :", "y, w): \"\"\"Weighted Correlation\"\"\" wcor = cov(x, y, w) /", "== 5: meanvar = np.empty((var.shape[0],var.shape[1],var.shape[2])) for ens in range(var.shape[0]): for", "(not weighted) corrcoef= np.corrcoef(varx.ravel(),vary.ravel())[0][1] print('Completed: Computed NON-weighted correlation!') else: 
ValueError('Wrong", ": 13 August 2017 Usage ----- [1] calcDecJan(varx,vary,lat,lon,level,levsq) [2] calcDecJanFeb(varx,vary,lat,lon,level,levsq)", "no) Returns ------- corrcoef : 1d array of correlation coefficient", "djappendh = np.append(varxravel[11+i,:,:,:], varxravel[12+i,:,:,:]) djappendf = np.append(varyravel[11+i,:,:,:], varyravel[12+i,:,:,:]) varx_dj[counter,:,:] =", "varyravel[13+i,:,:,:]) varx_djf[counter,:,:] = np.nanmean(np.reshape(djfappendh, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]),", "np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) varx_dj[counter,:,:] = np.nanmean(np.reshape(djappendh, (2,int(lat.shape[0]),int(lon.shape[0]))), axis=0) vary_dj[counter,:,:] = np.nanmean(np.reshape(djappendf, (2,int(lat.shape[0]),int(lon.shape[0]))),", "vary_djf[counter,:,:] = np.nanmean(np.reshape(djfappendf, (3,levsq,int(lat.shape[0]), int(lon.shape[0]))),axis=0) else: print(ValueError('Selected wrong height -", "np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has the wrong dimensions!')) print('Completed: Weighted variable", "elif level == 'profile': varxravel = np.reshape(varx.copy(), (int(varx.shape[0]*12.),levsq, int(lat.shape[0]),int(lon.shape[0]))) varyravel", "weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted correlation!') else: ValueError('Wrong", ": 1d array of correlation coefficient (pearson r) Usage -----", "areamask = np.cos(np.deg2rad(lats[mask])) meanvar = np.nansum(varmask*areamask)/np.sum(areamask) else: print(ValueError('Variable has the", "sit array 5d [ens,year,month,lat,lon] into [ens,year,month] Parameters ---------- var :", "r) Usage ----- corrcoef = calc_spatialCorrHeight(varx,vary,lats,lons) \"\"\" print('\\n>>> Using calc_spatialCorrHeight", "(surface or profile) levsq : integer number of levels Returns", "(int(vary.shape[0]*12), 
int(lat.shape[0]),int(lon.shape[0]))) varx_dj = np.empty((varx.shape[0]-1,lat.shape[0],lon.shape[0])) vary_dj = np.empty((vary.shape[0]-1,lat.shape[0],lon.shape[0]) ) for", "counter = i//12 djfappendh1 = np.append(varxravel[11+i,:,:],varxravel[12+i,:,:]) djfappendf1 = np.append(varyravel[11+i,:,:],varyravel[12+i,:,:]) djfappendh", "(not weighted) rmse = np.sqrt(mean_squared_error(varx.ravel(),vary.ravel())) print('Completed: Computed NON-weighted correlation!') else:", "(17 P Levels)!') return wave def cov(x, y, w): \"\"\"Weighted", "2 independent sample t-test Parameters ---------- varx : 3d array", "modules import numpy as np if weight == 'yes': #", "Usage ----- meanvar = calc_weightedAve(var,lats) \"\"\" print('\\n>>> Using calc_weightedAve function!')", "vary_djf = np.empty((int(vary.shape[0]-1),levsq, int(lat.shape[0]),int(lon.shape[0])) ) for i in range(0,varxravel.shape[0]-12,12): counter", "string Height of variable (surface or profile) levsq : integer", "m(x, w): \"\"\"Weighted Mean\"\"\" wave = np.sum(x * w) /", "lon2,lat2 = np.meshgrid(lons,lats) ### Create 2d array of weights based", "by months (ON,DJ,FM)!') print('*Completed: Finished calcDecJan function!') return varx_dj,vary_dj ###############################################################################" ]
[ "\"C\") as [vi, vj]: C[vi, vj] = B[vi, vj] +", "vi + 1, vj], (1, 1)) C[vi, vj] = Bb[0,", "\"C\" def test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\")", "128], \"C\") as [vi, vj]: D[vi, vj] = B[vi, vj]", "D = tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"C\") as", "tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj] = (B[vi,", "assert sch.get(block_d).name_hint == \"D\" def test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True,", "OF ANY # KIND, either express or implied. See the", "pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c =", "with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c", "B[vi, vj] + 2.0 + C[vi, vj] # D has", "more contributor license agreements. 
See the NOTICE file # distributed", "vj] = A[vi, vj] * 2.0 + A[vi, vj +", "* 2.0 + 1.0 @tvm.script.tir def fail_multi_reader_writer(a: ty.handle, d: ty.handle)", "vj] + 1.0 @tvm.script.tir def elementwise_predicate_inlined(a: ty.handle, c: ty.handle) ->", "debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched(): sch", "= A[vi, vj] * 2.0 # B has two consumers", "Apache Software Foundation (ASF) under one # or more contributor", "* 2.0 + 1.0) * (A[vi, vj] * 2.0 *", "vj] * 2.0 for j in tir.serial(0, 128): with tir.block([128,", "block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert", "(A[vi, vj] * 2.0 * 2.0) + 3.0 @tvm.script.tir def", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_as_dce(): sch", "ty.handle, c: ty.handle, d: ty.handle) -> None: A = tir.match_buffer(a,", "= tir.Schedule(buffer_matched, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def", "2.0 (the # \"License\"); you may not use this file", "2.0 + 1.0 @tvm.script.tir def fail_multi_reader_writer(a: ty.handle, d: ty.handle) ->", "+ 2.0 + C[vi, vj] @tvm.script.tir def elementwise_standalone(a: ty.handle, c:", "vj] = A[vi, vj] * 2.0 with tir.block([128, 128], \"C\")", "C[vi, vj] = tir.load(\"float32\", B.data, vi * 16 + vj)", "test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b = sch.get_block(\"B\") with", "tir.serial(0, 128): with tir.block([128, 128], \"C\") as [vi, vj]: tir.bind(vi,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", 
"1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle, d: ty.handle) ->", "128], \"C\") as [vi, vj]: Bb = tir.match_buffer(B[vi : vi", "128)) C = tir.match_buffer(c, (128, 128)) D = tir.match_buffer(d, (128,", "tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_as_dce(): sch =", "specific language governing permissions and limitations # under the License.", "= tir.match_buffer(c, (128, 128)) for i, j in tir.grid(128, 128):", "\"C\") as [vi, vj]: D[vi, vj] = B[vi, vj] +", "under the License is distributed on an # \"AS IS\"", "+ 1.0) * (A[vi, vj] * 2.0 * 2.0) +", "if __name__ == \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise()", "vj] * 2.0 * 2.0) + 3.0 @tvm.script.tir def opaque_access_load(a:", "128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"C\")", "def test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b)", "with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate, debug_mode=True) block_b", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either", "2.0 + 1.0 @tvm.script.tir def elementwise_multi_loads(a: ty.handle, c: ty.handle) ->", "sch = tir.Schedule(elementwise_predicate, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"])", "tir.serial(0, 128): for j in tir.serial(0, 128): with tir.block([128, 128],", "test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() 
test_reverse_compute_multi_reverse_loads() test_reverse_compute_fail_multi_reverse_loads() test_opaque_access_load() test_opaque_access_store() test_buffer_matched() test_compute_inline_predicate() test_compute_inline_multi_loads()", "def test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c)", "= A[vi, vj] * 2.0 for j in tir.serial(0, 128):", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "vj] = A[vi, vj] * 2.0 for i, j in", "with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d", "block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched(): sch =", "128)) D = tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"B\")", "128): with tir.block([128, 128], \"C\") as [vi, vj]: tir.where(A[i, j]", "as [vi, vj]: C[vi, vj] = (B[vi, vj] + 1.0)", "2.0 + A[vi, vj + 2] * 2.0 # pylint:", "= A[vi, vj] * 2.0 + A[vi, vj + 1]", "distributed with this work for additional information # regarding copyright", "vj]: C[vi, vj] = (B[vi, vj] + 1.0) * (B[vi,", "2.0 with tir.block([128, 126], \"C\") as [vi, vj]: C[vi, vj]", "two producers @tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle, d: ty.handle)", "debug_mode=True) block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch", "tir.writes(C[0:128, 0:128]) tir.store(C.data, vi * 128 + vj, B[vi, vj]", "B[vi, vj] = A[vi, vj] * 2.0 for j in", "\"B\") as [vi, vj]: B[vi, vj] = A[vi, vj] *", "sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b = 
sch.get_block(\"B\") block_c = sch.get_block(\"C\")", "for the # specific language governing permissions and limitations #", "vj]: B[vi, vj] = A[vi, vj] * 2.0 # B", "vi * 128 + vj, B[vi, vj] + 1.0) C[vi,", "+ C[vi, vj] # D has two producers @tvm.script.tir def", "* 2.0 for j in tir.serial(0, 128): with tir.block([128, 128],", "= tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c)", "= tir.Schedule(opaque_access_store, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def", "A = tir.match_buffer(a, (128, 128)) B = tir.alloc_buffer((128, 128)) C", "with tir.block([128, 128], \"B\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj,", "vj] = (B[vi, vj] + 1.0) * (B[vi, vj] *", "See the License for the # specific language governing permissions", "to in writing, # software distributed under the License is", "tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) B =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "128)) C = tir.match_buffer(c, (128, 128)) B = tir.alloc_buffer((128, 128))", "2.0) + 3.0 @tvm.script.tir def opaque_access_load(a: ty.handle, c: ty.handle) ->", "1.0) C[vi, vj] = tir.load(\"float32\", B.data, vi * 16 +", "Bb = tir.match_buffer(B[vi : vi + 1, vj], (1, 1))", "sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True)", "pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a: ty.handle, c: ty.handle) -> None:", "128)) for i, j in tir.grid(128, 128): with tir.block([128, 128],", "def test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c = sch.get_block(\"C\") with", "tir.serial(0, 128): with tir.block([128, 128], 
\"B\") as [vi, vj]: tir.bind(vi,", "= sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch = tir.Schedule(opaque_access_load,", "test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() test_reverse_compute_multi_reverse_loads() test_reverse_compute_fail_multi_reverse_loads() test_opaque_access_load() test_opaque_access_store() test_buffer_matched()", "vj] * 2.0 C[vi, vj] = A[vi, vj] + 2.0", "block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch =", "__name__ == \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop()", "* 2.0 + A[vi, vj + 1] * 2.0 +", "1.0 @tvm.script.tir def buffer_matched(a: ty.handle, c: ty.handle) -> None: A", "file # distributed with this work for additional information #", "test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() test_reverse_compute_multi_reverse_loads() test_reverse_compute_fail_multi_reverse_loads() test_opaque_access_load()", "2.0 for i, j in tir.grid(128, 128): with tir.block([128, 128],", "B[vi, vj] = A[vi, vj] * 2.0 with tir.block([128, 126],", "C[vi, vj] = B[vi, vj] + B[vi, vj + 1]", "vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) C[vi, vj] = tir.load(\"float32\", B.data,", 
"test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined,", "c: ty.handle) -> None: A = tir.match_buffer(a, (128, 128)) B", "with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched(): sch = tir.Schedule(buffer_matched, debug_mode=True) block_b", "sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"])", "A[vi, vj] * 2.0 C[vi, vj] = A[vi, vj] +", "128)) B = tir.alloc_buffer((128, 128)) C = tir.alloc_buffer((128, 128)) D", "as [vi, vj]: C[vi, vj] = A[vi, vj] * 2.0", "test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() test_reverse_compute_multi_reverse_loads() test_reverse_compute_fail_multi_reverse_loads()", "debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__ ==", "with tir.block([128, 128], \"C\") as [vi, vj]: tir.where(A[i, j] *", "sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c)", "test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() 
test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() test_reverse_compute_multi_reverse_loads()", "tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__ == \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer()", "+ 3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle) -> None:", "128], \"C\") as [vi, vj]: C[vi, vj] = A[vi, vj]", "elementwise_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "as [vi, vj]: D[vi, vj] = A[vi, vj] * 2.0", "128): with tir.block([128, 128], \"C\") as [vi, vj]: tir.bind(vi, i)", "implied. See the License for the # specific language governing", "vj]: C[vi, vj] = B[vi, vj] + 1.0 with tir.block([128,", "to you under the Apache License, Version 2.0 (the #", "vj] + 1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle, d:", "tir.match_buffer(a, (128, 128)) B = tir.alloc_buffer((128, 128)) C = tir.alloc_buffer((128,", "* (B[vi, vj] * 2.0) + 3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a:", "sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_under_loop():", "+ C[vi, vj] @tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle) ->", "@tvm.script.tir def buffer_matched(a: ty.handle, c: ty.handle) -> None: A =", "A[vi, vj] * 2.0 with tir.block([128, 128], \"C\") as [vi,", "as [vi, vj]: tir.where(A[i, j] * 2.0 < 10.0) C[vi,", "with tir.block([128, 128], \"B\") as [vi, vj]: B[vi, vj] =", "tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_under_loop(): sch =", "may not use this file except in compliance # with", "= 
tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def", "from tvm import tir from tvm.script import ty # pylint:", "sch.mod[\"main\"]) if __name__ == \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer()", "block_d = sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\"", "tir.block([128, 128], \"D\") as [vi, vj]: D[vi, vj] = B[vi,", "= sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint", "sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c = sch.get_block(\"C\")", "* 2.0 with tir.block([128, 126], \"C\") as [vi, vj]: C[vi,", "(128, 128)) C = tir.match_buffer(c, (128, 128)) D = tir.match_buffer(d,", "vj + 1] * 2.0 + A[vi, vj + 2]", "License, Version 2.0 (the # \"License\"); you may not use", "test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined,", "either express or implied. 
See the License for the #", "= tir.alloc_buffer((128, 128)) C = tir.alloc_buffer((128, 128)) D = tir.match_buffer(d,", "2.0 * 2.0) + 3.0 @tvm.script.tir def opaque_access_load(a: ty.handle, c:", "= B[vi, vj] + 2.0 + C[vi, vj] # D", "tir.match_buffer(B[vi : vi + 1, vj], (1, 1)) C[vi, vj]", "= tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128,", "C = tir.match_buffer(c, (128, 128)) for i, j in tir.grid(128,", "debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") block_d = sch.get_block(\"D\")", "126], \"C\") as [vi, vj]: C[vi, vj] = B[vi, vj]", "tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data, vi * 128 + vj,", "10.0) C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir def", "with tir.block([128, 128], \"C\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj,", "elementwise_multi_loads(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "= tir.Schedule(opaque_access_load, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def", "1.0) * (B[vi, vj] * 2.0) + 3.0 @tvm.script.tir def", "block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\"", "A[vi, vj] * 2.0 + A[vi, vj + 1] *", "= tir.load(\"float32\", B.data, vi * 128 + vj) + 1.0", "(128, 128)) B = tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128,", "additional information # regarding copyright ownership. 
The ASF licenses this", "vj] = B[vi, vj] + 1.0 @tvm.script.tir def elementwise_inlined(a: ty.handle,", "@tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle) -> None: A =", "128], \"C\") as [vi, vj]: C[vi, vj] = B[vi, vj]", "+ 1] * 2.0 + A[vi, vj + 2] *", "sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\")", "sch.compute_inline(block_b) def test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate, debug_mode=True) block_b = sch.get_block(\"B\")", "(128, 128)) C = tir.match_buffer(c, (128, 128)) B = tir.alloc_buffer((128,", "See the NOTICE file # distributed with this work for", "== \"D\" def test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b", "debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch", "* 2.0 with tir.block([128, 128], \"C\") as [vi, vj]: tir.reads(B[0:128,", "tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def", "3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle) -> None: A", "vj]: D[vi, vj] = A[vi, vj] * 2.0 + 2.0", "test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() test_reverse_compute_multi_reverse_loads() test_reverse_compute_fail_multi_reverse_loads() test_opaque_access_load() test_opaque_access_store() test_buffer_matched() test_compute_inline_predicate()", "Apache License, Version 2.0 (the # \"License\"); you may not", "128], \"B\") as [vi, vj]: B[vi, vj] = A[vi, vj]", "vj] 
+ 1.0 @tvm.script.tir def elementwise_under_loop(a: ty.handle, c: ty.handle) ->", "sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint ==", "sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b", "tir.load(\"float32\", B.data, vi * 128 + vj) + 1.0 @tvm.script.tir", "vj]: tir.where(B[i, j] < 10.0) C[vi, vj] = B[vi, vj]", "@tvm.script.tir def elementwise(a: ty.handle, c: ty.handle) -> None: A =", "= sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched(): sch = tir.Schedule(buffer_matched,", "i, j in tir.grid(128, 128): with tir.block([128, 128], \"C\") as", "vj] = A[vi, vj] + 1.0 @tvm.script.tir def elementwise_standalone_dce(a: ty.handle,", "pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b =", "* 2.0) + 3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle)", "vj] * 2.0 + 2.0 + C[vi, vj] @tvm.script.tir def", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "file except in compliance # with the License. 
You may", "[vi, vj]: tir.bind(vi, i) tir.bind(vj, j) C[vi, vj] = B[vi,", "tir.block([128, 128], \"B\") as [vi, vj]: B[vi, vj] = A[vi,", "128)) C = tir.match_buffer(c, (128, 128)) for i, j in", "sch = tir.Schedule(buffer_matched, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b)", "# specific language governing permissions and limitations # under the", "2.0 + C[vi, vj] # D has two producers @tvm.script.tir", "128], \"B\") as [vi, vj]: C[vi, vj] = (A[vi, vj]", "B[vi, vj] + 1.0 @tvm.script.tir def elementwise_predicate_inlined(a: ty.handle, c: ty.handle)", "128], \"C\") as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) C[vi,", "= Bb[0, 0] + 1.0 @tvm.script.tir def elementwise_predicate(a: ty.handle, c:", "@tvm.script.tir def elementwise_predicate_inlined(a: ty.handle, c: ty.handle) -> None: A =", "0] + 1.0 @tvm.script.tir def elementwise_predicate(a: ty.handle, c: ty.handle) ->", "sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_multi_consumer():", "you may not use this file except in compliance #", "tir.block([128, 128], \"D\") as [vi, vj]: D[vi, vj] = A[vi,", "@tvm.script.tir def fail_multi_reader_writer(a: ty.handle, d: ty.handle) -> None: A =", "C[vi, vj] = (B[vi, vj] + 1.0) * (B[vi, vj]", "128): for j in tir.serial(0, 128): with tir.block([128, 128], \"B\")", "[vi, vj]: tir.where(B[i, j] < 10.0) C[vi, vj] = B[vi,", "(128, 128)) D = tir.match_buffer(d, (128, 128)) with tir.block([128, 128],", "128)) for i in tir.serial(0, 128): for j in tir.serial(0,", "use this file except in compliance # with the License.", "= sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads,", "[vi, vj]: 
C[vi, vj] = B[vi, vj] + B[vi, vj", "128], \"C\") as [vi, vj]: C[vi, vj] = (B[vi, vj]", "C = tir.match_buffer(c, (128, 128)) with tir.block([128, 126], \"C\") as", "128], \"D\") as [vi, vj]: D[vi, vj] = B[vi, vj]", "contributor license agreements. See the NOTICE file # distributed with", "vj] = B[vi, vj] + C[vi, vj] @tvm.script.tir def elementwise_multi_reverse_loads(a:", "def elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "2.0 # B has two consumers with tir.block([128, 128], \"C\")", "D = tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"B\") as", "# pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b", "A[vi, vj] * 2.0 with tir.block([128, 126], \"C\") as [vi,", "vj] * 2.0 with tir.block([128, 126], \"C\") as [vi, vj]:", "1.0 @tvm.script.tir def opaque_access_store(a: ty.handle, c: ty.handle) -> None: A", "test_opaque_access_load(): sch = tir.Schedule(opaque_access_load, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError):", "with tir.block([128, 128], \"C\") as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128,", "[vi, vj]: Bb = tir.match_buffer(B[vi : vi + 1, vj],", "[vi, vj]: B[vi, vj] = A[vi, vj] * 2.0 #", "an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "= tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"B\") as [vi,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "2] * 2.0 # pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch =", "\"B\" def test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\")", "with this work for additional information # regarding copyright ownership.", "import tir from tvm.script import ty # pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir", "= 
tir.alloc_buffer((128, 128)) for i in tir.serial(0, 128): for j", "+ 1.0) * (B[vi, vj] * 2.0) + 3.0 @tvm.script.tir", "C = tir.alloc_buffer((128, 128)) D = tir.match_buffer(d, (128, 128)) with", "block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__ == \"__main__\":", "= A[vi, vj] * 2.0 + 1.0 @tvm.script.tir def elementwise_multi_loads(a:", "vj] * 2.0 + 1.0 @tvm.script.tir def elementwise_multi_loads(a: ty.handle, c:", "two consumers with tir.block([128, 128], \"C\") as [vi, vj]: C[vi,", "work for additional information # regarding copyright ownership. The ASF", "def test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") block_c", "tir.block([128, 128], \"B\") as [vi, vj]: C[vi, vj] = (A[vi,", "elementwise_standalone_dce(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "vj] @tvm.script.tir def elementwise_standalone(a: ty.handle, c: ty.handle) -> None: A", "tir.grid(128, 128): with tir.block([128, 128], \"C\") as [vi, vj]: tir.where(B[i,", "as [vi, vj]: D[vi, vj] = B[vi, vj] + C[vi,", "vj] = A[vi, vj] * 2.0 with tir.block([128, 126], \"C\")", "enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\")", "A[vi, vj + 2] * 2.0 # pylint: enable=no-member,invalid-name,unused-variable def", "distributed under the License is distributed on an # \"AS", "128)) D = tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"C\")", "sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" assert sch.get(block_d).name_hint", "1)) C[vi, vj] = Bb[0, 0] + 1.0 @tvm.script.tir def", "* (A[vi, vj] * 2.0 * 2.0) + 3.0 @tvm.script.tir", "C[vi, vj] = B[vi, vj] + 1.0 
@tvm.script.tir def elementwise_inlined(a:", "# software distributed under the License is distributed on an", "tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128, 128],", "opaque_access_load(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch =", "tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) D =", "ty # pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a: ty.handle, c: ty.handle)", "vj]: B[vi, vj] = A[vi, vj] * 2.0 for i,", "[vi, vj]: C[vi, vj] = A[vi, vj] + 1.0 @tvm.script.tir", "the License. You may obtain a copy of the License", "vj] * 2.0 + A[vi, vj + 1] * 2.0", "A[vi, vj] * 2.0 + 2.0 + C[vi, vj] @tvm.script.tir", "def opaque_access_store(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d = sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader():", "vi * 128 + vj) + 1.0 @tvm.script.tir def opaque_access_store(a:", "block_c = sch.get_block(\"C\") block_d = sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert", "vj] + C[vi, vj] @tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle)", "ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128, 128))", "\"C\") as [vi, vj]: tir.where(A[i, j] * 2.0 < 10.0)", "test_reverse_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c =", "import pytest import tvm from tvm import tir from tvm.script", "128)) with tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj]", "vj] * 2.0 + 1.0 with 
tir.block([128, 128], \"D\") as", "under the Apache License, Version 2.0 (the # \"License\"); you", "as [vi, vj]: C[vi, vj] = B[vi, vj] + 1.0", "pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched(): sch = tir.Schedule(buffer_matched, debug_mode=True) block_b =", "governing permissions and limitations # under the License. # pylint:", "D has two producers @tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle,", "test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c =", "limitations # under the License. # pylint: disable=missing-function-docstring,missing-module-docstring import pytest", "128 + vj, B[vi, vj] + 1.0) C[vi, vj] =", "+ 2] @tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle) -> None:", "distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b", "regarding copyright ownership. 
The ASF licenses this file # to", "128)) C = tir.alloc_buffer((128, 128)) D = tir.match_buffer(d, (128, 128))", "\"C\" assert sch.get(block_d).name_hint == \"D\" def test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer,", "\"D\") as [vi, vj]: D[vi, vj] = A[vi, vj] *", "or agreed to in writing, # software distributed under the", "2.0 + 2.0 + C[vi, vj] @tvm.script.tir def elementwise_standalone(a: ty.handle,", "with tir.block([128, 128], \"C\") as [vi, vj]: D[vi, vj] =", "def opaque_access_load(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "test_opaque_access_store(): sch = tir.Schedule(opaque_access_store, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError):", "+ 1] + B[vi, vj + 2] @tvm.script.tir def elementwise_multi_loads_inlined(a:", "A[vi, vj] + 1.0 @tvm.script.tir def elementwise_under_loop(a: ty.handle, c: ty.handle)", "with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_opaque_access_store(): sch = tir.Schedule(opaque_access_store, debug_mode=True) block_b", "vj]: tir.where(A[i, j] * 2.0 < 10.0) C[vi, vj] =", "vj] = (A[vi, vj] * 2.0 + 1.0) * (A[vi,", "* 2.0 for i, j in tir.grid(128, 128): with tir.block([128,", "tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"B\") as [vi, vj]:", "= tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"C\") as [vi,", "= tir.match_buffer(B[vi : vi + 1, vj], (1, 1)) C[vi,", "def test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c", "C[vi, vj] = A[vi, vj] + 1.0 @tvm.script.tir def elementwise_under_loop(a:", "[vi, vj]: B[vi, vj] = A[vi, vj] * 2.0 C[vi,", "vj] + 1.0 with tir.block([128, 128], \"D\") as [vi, vj]:", "and limitations # under the License. 
# pylint: disable=missing-function-docstring,missing-module-docstring import", "assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True)", "@tvm.script.tir def opaque_access_load(a: ty.handle, c: ty.handle) -> None: A =", "D[vi, vj] = B[vi, vj] + C[vi, vj] @tvm.script.tir def", "or more contributor license agreements. See the NOTICE file #", "\"C\") as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data, vi", "test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") block_c =", "B.data, vi * 128 + vj) + 1.0 @tvm.script.tir def", "= tir.match_buffer(a, (128, 128)) B = tir.alloc_buffer((128, 128)) C =", "this work for additional information # regarding copyright ownership. The", "2.0 + 1.0 with tir.block([128, 128], \"D\") as [vi, vj]:", "+ 1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle, d: ty.handle)", "def elementwise(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "the NOTICE file # distributed with this work for additional", "\"C\") as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) C[vi, vj]", "C[vi, vj] @tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle) -> None:", "+ 1.0 @tvm.script.tir def elementwise_predicate(a: ty.handle, c: ty.handle) -> None:", "sch.get_block(\"C\") block_d = sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint ==", "pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d =", "sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_under_loop(): sch = 
tir.Schedule(elementwise_under_loop,", "def test_buffer_matched(): sch = tir.Schedule(buffer_matched, debug_mode=True) block_b = sch.get_block(\"B\") with", "def test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") with", "sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c = sch.get_block(\"C\")", "= tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if", "B[vi, vj] = A[vi, vj] * 2.0 C[vi, vj] =", "+ 1.0 @tvm.script.tir def opaque_access_store(a: ty.handle, c: ty.handle) -> None:", "tir.match_buffer(c, (128, 128)) B = tir.alloc_buffer((128, 128)) for i in", "tir.bind(vi, i) tir.bind(vj, j) C[vi, vj] = B[vi, vj] +", "B[vi, vj] + 1.0) C[vi, vj] = tir.load(\"float32\", B.data, vi", "tir.block([128, 128], \"C\") as [vi, vj]: tir.where(A[i, j] * 2.0", "== \"C\" def test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b =", "< 10.0) C[vi, vj] = A[vi, vj] * 2.0 +", "sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True)", "elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle, d: ty.handle) -> None: A =", "vj]: B[vi, vj] = A[vi, vj] * 2.0 with tir.block([128,", "== \"C\" def test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b =", "= tir.match_buffer(c, (128, 128)) with tir.block([128, 126], \"C\") as [vi,", "= sch.get_block(\"D\") with 
pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer,", "def elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "(1, 1)) C[vi, vj] = Bb[0, 0] + 1.0 @tvm.script.tir", "as [vi, vj]: C[vi, vj] = (A[vi, vj] * 2.0", "disable=missing-function-docstring,missing-module-docstring import pytest import tvm from tvm import tir from", "pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c =", ": vi + 1, vj], (1, 1)) C[vi, vj] =", "1.0 @tvm.script.tir def elementwise_under_loop(a: ty.handle, c: ty.handle) -> None: A", "tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined,", "tir.block([128, 128], \"C\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j)", "d: ty.handle) -> None: A = tir.match_buffer(a, (128, 128)) C", "= tir.load(\"float32\", B.data, vi * 16 + vj) + 1.0", "\"B\") as [vi, vj]: C[vi, vj] = (A[vi, vj] *", "C[vi, vj] = A[vi, vj] + 2.0 with tir.block([128, 128],", "tir.load(\"float32\", B.data, vi * 16 + vj) + 1.0 @tvm.script.tir", "B = tir.alloc_buffer((128, 128)) for i in tir.serial(0, 128): for", "sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint ==", "vj] + 2.0 + C[vi, vj] # D has two", "= tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"B\") as [vi,", "C = tir.match_buffer(c, (128, 128)) B = tir.alloc_buffer((128, 128)) for", "KIND, either express or implied. 
See the License for the", "tir.writes(C[0:128, 0:128]) C[vi, vj] = tir.load(\"float32\", B.data, vi * 128", "block_d = sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch =", "vj]: C[vi, vj] = A[vi, vj] * 2.0 + 1.0", "C = tir.match_buffer(c, (128, 128)) D = tir.match_buffer(d, (128, 128))", "+ 2.0 with tir.block([128, 128], \"C\") as [vi, vj]: D[vi,", "2.0) + 3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle) ->", "B[vi, vj + 2] @tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle)", "sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b)", "test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b = sch.get_block(\"B\") block_c =", "test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader()", "elementwise_under_loop(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c", "2.0 with tir.block([128, 128], \"C\") as [vi, vj]: D[vi, vj]", "B[vi, vj] = A[vi, vj] * 2.0 with tir.block([128, 128],", "buffer_matched(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "Bb[0, 0] + 1.0 @tvm.script.tir def elementwise_predicate(a: ty.handle, c: ty.handle)", 
"sch.get(block_c).name_hint == \"C\" def test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b", "(128, 128)) B = tir.alloc_buffer((128, 128)) for i in tir.serial(0,", "or implied. See the License for the # specific language", "express or implied. See the License for the # specific", "B = tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128, 128)) with", "+ 1, vj], (1, 1)) C[vi, vj] = Bb[0, 0]", "sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b", "sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True)", "tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj] = A[vi,", "vj]: B[vi, vj] = A[vi, vj] * 2.0 C[vi, vj]", "* 2.0 # B has two consumers with tir.block([128, 128],", "A[vi, vj] * 2.0 for j in tir.serial(0, 128): with", "[vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) C[vi, vj] = tir.load(\"float32\",", "126], \"C\") as [vi, vj]: C[vi, vj] = A[vi, vj]", "C[vi, vj] = Bb[0, 0] + 1.0 @tvm.script.tir def elementwise_predicate(a:", "sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d = sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d)", "block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert", "the # specific language governing permissions and limitations # under", "2.0 # pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True)", "i) tir.bind(vj, j) C[vi, vj] = B[vi, vj] + 1.0", "2.0 with tir.block([128, 128], \"C\") as [vi, vj]: Bb 
=", "= sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads,", "\"C\") as [vi, vj]: Bb = tir.match_buffer(B[vi : vi +", "tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__", "has two consumers with tir.block([128, 128], \"C\") as [vi, vj]:", "error_render_level=\"detail\") block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch", "j) B[vi, vj] = A[vi, vj] * 2.0 for j", "may obtain a copy of the License at # #", "vj] * 2.0) + 3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle, c:", "0:128]) tir.writes(C[0:128, 0:128]) C[vi, vj] = tir.load(\"float32\", B.data, vi *", "def buffer_matched(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "@tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle, d: ty.handle) -> None:", "= tir.match_buffer(c, (128, 128)) D = tir.match_buffer(d, (128, 128)) with", "test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError):", "in tir.serial(0, 128): with tir.block([128, 128], \"C\") as [vi, vj]:", "tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce,", "The ASF licenses this file # to you under the", "# D has two producers @tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle, c:", "tir.block([128, 128], \"C\") as [vi, vj]: Bb = tir.match_buffer(B[vi :", "128)) B = tir.alloc_buffer((128, 128)) C = 
tir.match_buffer(c, (128, 128))", "= tir.alloc_buffer((128, 128)) D = tir.match_buffer(d, (128, 128)) with tir.block([128,", "vj] + 1.0) * (B[vi, vj] * 2.0) + 3.0", "def elementwise_under_loop(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "+ 3.0 @tvm.script.tir def opaque_access_load(a: ty.handle, c: ty.handle) -> None:", "tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj] = B[vi,", "block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\"", "# Licensed to the Apache Software Foundation (ASF) under one", "j in tir.serial(0, 128): with tir.block([128, 128], \"C\") as [vi,", "vj] = B[vi, vj] + 2.0 + C[vi, vj] #", "tir.bind(vj, j) B[vi, vj] = A[vi, vj] * 2.0 for", "= sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint", "= sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" assert", "has two producers @tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle, d:", "vj] = B[vi, vj] + B[vi, vj + 1] +", "None: A = tir.match_buffer(a, (128, 128)) B = tir.alloc_buffer((128, 128))", "assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True)", "C[vi, vj] = A[vi, vj] * 2.0 + 1.0 @tvm.script.tir", "A[vi, vj] * 2.0 # B has two consumers with", "B[vi, vj] = A[vi, vj] * 2.0 for i, j", "sch.get_block(\"B\") block_c = sch.get_block(\"C\") block_d = sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"])", "sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): 
sch.compute_inline(block_b) def test_opaque_access_store(): sch = tir.Schedule(opaque_access_store, debug_mode=True)", "1.0 with tir.block([128, 128], \"D\") as [vi, vj]: D[vi, vj]", "law or agreed to in writing, # software distributed under", "= A[vi, vj] * 2.0 with tir.block([128, 126], \"C\") as", "elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\")", "Foundation (ASF) under one # or more contributor license agreements.", "assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True)", "tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_fail_as_dce(): sch =", "= tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) for", "= (A[vi, vj] * 2.0 + 1.0) * (A[vi, vj]", "[vi, vj]: C[vi, vj] = (A[vi, vj] * 2.0 +", "C = tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"B\") as", "(128, 128)) with tir.block([128, 128], \"C\") as [vi, vj]: C[vi,", "test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError):", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "sch.get(block_c).name_hint == \"C\" def test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b", "Software Foundation (ASF) under one # or more contributor license", "128 + vj) + 1.0 @tvm.script.tir def opaque_access_store(a: ty.handle, c:", "= sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def", "sch.reverse_compute_inline(block_c) 
tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c", "C[vi, vj] = B[vi, vj] + 1.0 with tir.block([128, 128],", "[vi, vj]: B[vi, vj] = A[vi, vj] * 2.0 for", "# regarding copyright ownership. The ASF licenses this file #", "block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch =", "vj] = A[vi, vj] * 2.0 + 1.0 @tvm.script.tir def", "+ 1.0 @tvm.script.tir def buffer_matched(a: ty.handle, c: ty.handle) -> None:", "in compliance # with the License. You may obtain a", "# to you under the Apache License, Version 2.0 (the", "License for the # specific language governing permissions and limitations", "1.0 @tvm.script.tir def elementwise_inlined(a: ty.handle, c: ty.handle) -> None: A", "vj] * 2.0 + 1.0 @tvm.script.tir def fail_multi_reader_writer(a: ty.handle, d:", "elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle, d: ty.handle) -> None: A =", "vj] * 2.0 with tir.block([128, 128], \"C\") as [vi, vj]:", "C[vi, vj] = (A[vi, vj] * 2.0 + 1.0) *", "* 2.0 + 1.0 @tvm.script.tir def elementwise_multi_loads(a: ty.handle, c: ty.handle)", "vj] + 1.0) C[vi, vj] = tir.load(\"float32\", B.data, vi *", "OR CONDITIONS OF ANY # KIND, either express or implied.", "vj]: C[vi, vj] = (A[vi, vj] * 2.0 + 1.0)", "tir from tvm.script import ty # pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def", "[vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data, vi * 128", "128], \"C\") as [vi, vj]: tir.where(A[i, j] * 2.0 <", "= B[vi, vj] + 1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle, c:", "as [vi, vj]: Bb = tir.match_buffer(B[vi : vi + 1,", "tir.where(B[i, j] < 10.0) C[vi, vj] = B[vi, vj] +", "+ A[vi, vj + 1] * 2.0 + A[vi, vj", 
"tir.match_buffer(a, (128, 128)) B = tir.alloc_buffer((128, 128)) C = tir.match_buffer(c,", "== \"B\" def test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b =", "tir.bind(vj, j) C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir", "tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer():", "tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads():", "vj]: C[vi, vj] = A[vi, vj] * 2.0 + A[vi,", "def elementwise_predicate_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "this file # to you under the Apache License, Version", "(128, 128)) with tir.block([128, 128], \"B\") as [vi, vj]: C[vi,", "0:128]) C[vi, vj] = tir.load(\"float32\", B.data, vi * 128 +", "tir.match_buffer(c, (128, 128)) for i, j in tir.grid(128, 128): with", "copyright ownership. 
The ASF licenses this file # to you", "+ 1.0 @tvm.script.tir def elementwise_multi_loads(a: ty.handle, c: ty.handle) -> None:", "sch.get(block_c).name_hint == \"C\" def test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b", "< 10.0) C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir", "with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch = tir.Schedule(opaque_access_load, debug_mode=True) block_b", "A[vi, vj + 1] * 2.0 + A[vi, vj +", "producers @tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle, d: ty.handle) ->", "sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint ==", "block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch =", "tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) C[vi, vj] = tir.load(\"float32\", B.data, vi", "10.0) C[vi, vj] = A[vi, vj] * 2.0 + 1.0", "128)) with tir.block([128, 126], \"C\") as [vi, vj]: C[vi, vj]", "sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError):", "D[vi, vj] = A[vi, vj] * 2.0 + 2.0 +", "debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_opaque_access_store(): sch", "[vi, vj]: D[vi, vj] = B[vi, vj] + 2.0 +", "\"C\" def test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\")", "sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop,", "in writing, # software distributed under the License is distributed", "vj] 
@tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle) -> None: A", "vj] + 1.0 @tvm.script.tir def elementwise_standalone_dce(a: ty.handle, c: ty.handle) ->", "i) tir.bind(vj, j) B[vi, vj] = A[vi, vj] * 2.0", "tir.block([128, 128], \"C\") as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128])", "* 128 + vj) + 1.0 @tvm.script.tir def opaque_access_store(a: ty.handle,", "2.0 + C[vi, vj] @tvm.script.tir def elementwise_standalone(a: ty.handle, c: ty.handle)", "= tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def", "[vi, vj]: B[vi, vj] = A[vi, vj] * 2.0 with", "with tir.block([128, 128], \"B\") as [vi, vj]: C[vi, vj] =", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "vj] = B[vi, vj] + 1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle,", "ty.handle) -> None: A = tir.match_buffer(a, (128, 128)) B =", "License is distributed on an # \"AS IS\" BASIS, WITHOUT", "= A[vi, vj] * 2.0 with tir.block([128, 128], \"C\") as", "debug_mode=True, error_render_level=\"detail\") block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise():", "128], \"D\") as [vi, vj]: D[vi, vj] = A[vi, vj]", "sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_under_loop():", "debug_mode=True) block_d = sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch", "B[vi, vj] = A[vi, vj] * 2.0 # B has", "@tvm.script.tir def elementwise_under_loop(a: ty.handle, c: ty.handle) -> None: A =", "sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError): 
sch.reverse_compute_inline(block_d) def test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True)", "assert sch.get(block_c).name_hint == \"C\" assert sch.get(block_d).name_hint == \"D\" def test_compute_inline_fail_multi_writer():", "# pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a: ty.handle, c: ty.handle) ->", "= tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128, 128)) D =", "B[vi, vj] + B[vi, vj + 1] + B[vi, vj", "in tir.grid(128, 128): with tir.block([128, 128], \"C\") as [vi, vj]:", "None: A = tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128,", "# \"License\"); you may not use this file except in", "# B has two consumers with tir.block([128, 128], \"C\") as", "def elementwise_standalone_dce(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "128)) with tir.block([128, 128], \"B\") as [vi, vj]: C[vi, vj]", "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "tir.alloc_buffer((128, 128)) C = tir.alloc_buffer((128, 128)) D = tir.match_buffer(d, (128,", "to the Apache Software Foundation (ASF) under one # or", "as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) C[vi, vj] =", "pytest import tvm from tvm import tir from tvm.script import", "\"License\"); you may not use this file except in compliance", "[vi, vj]: tir.bind(vi, i) tir.bind(vj, j) B[vi, vj] = A[vi,", "16 + vj) + 1.0 @tvm.script.tir def buffer_matched(a: ty.handle, c:", "elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "2.0 < 10.0) C[vi, vj] = A[vi, vj] * 2.0", "block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch =", "test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() 
test_reverse_compute_inline_fail_multi_producer() test_reverse_compute_inline_fail_multi_reader() test_reverse_compute_multi_reverse_loads() test_reverse_compute_fail_multi_reverse_loads() test_opaque_access_load() test_opaque_access_store()", "sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__ == \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce()", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "ty.handle, d: ty.handle) -> None: A = tir.match_buffer(a, (128, 128))", "tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined,", "import tvm from tvm import tir from tvm.script import ty", "128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128, 126], \"C\")", "ty.handle) -> None: A = tir.match_buffer(a, (128, 128)) C =", "# distributed with this work for additional information # regarding", "tvm from tvm import tir from tvm.script import ty #", "= A[vi, vj] * 2.0 for i, j in tir.grid(128,", "= A[vi, vj] + 2.0 with tir.block([128, 128], \"C\") as", "writing, # software distributed under the License is distributed on", "(128, 128)) for i, j in tir.grid(128, 128): with tir.block([128,", "def test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d = sch.get_block(\"D\") with", "sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch = tir.Schedule(opaque_access_load, debug_mode=True) block_b = sch.get_block(\"B\")", "* 2.0) + 3.0 @tvm.script.tir def opaque_access_load(a: ty.handle, c: ty.handle)", "= sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def", "vj] + 2.0 
with tir.block([128, 128], \"C\") as [vi, vj]:", "+ B[vi, vj + 2] @tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle, c:", "sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" assert sch.get(block_d).name_hint ==", "sch.get(block_d).name_hint == \"D\" def test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\")", "= tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b)", "* 2.0 < 10.0) C[vi, vj] = A[vi, vj] *", "B has two consumers with tir.block([128, 128], \"C\") as [vi,", "B[vi, vj] + 1.0 @tvm.script.tir def elementwise_inlined(a: ty.handle, c: ty.handle)", "def elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle, d: ty.handle) -> None: A", "= tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c)", "== \"B\" def test_reverse_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b =", "CONDITIONS OF ANY # KIND, either express or implied. See", "= A[vi, vj] * 2.0 + 2.0 + C[vi, vj]", "0:128]) tir.store(C.data, vi * 128 + vj, B[vi, vj] +", "vj]: D[vi, vj] = B[vi, vj] + C[vi, vj] @tvm.script.tir", "* 2.0 + 2.0 + C[vi, vj] @tvm.script.tir def elementwise_standalone(a:", "the License. 
# pylint: disable=missing-function-docstring,missing-module-docstring import pytest import tvm from", "* 128 + vj, B[vi, vj] + 1.0) C[vi, vj]", "+ 1.0 @tvm.script.tir def elementwise_predicate_inlined(a: ty.handle, c: ty.handle) -> None:", "tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"B\") as [vi, vj]:", "sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched(): sch = tir.Schedule(buffer_matched, debug_mode=True)", "B[vi, vj] + 1.0 with tir.block([128, 128], \"D\") as [vi,", "d: ty.handle) -> None: A = tir.match_buffer(a, (128, 128)) B", "\"B\" def test_reverse_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\")", "(128, 128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128, 128],", "= sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate,", "debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"])", "for additional information # regarding copyright ownership. 
The ASF licenses", "= tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) with", "the Apache Software Foundation (ASF) under one # or more", "@tvm.script.tir def elementwise_predicate(a: ty.handle, c: ty.handle) -> None: A =", "vj] = A[vi, vj] * 2.0 for j in tir.serial(0,", "# # Unless required by applicable law or agreed to", "+ vj) + 1.0 @tvm.script.tir def buffer_matched(a: ty.handle, c: ty.handle)", "Version 2.0 (the # \"License\"); you may not use this", "vj] = tir.load(\"float32\", B.data, vi * 16 + vj) +", "vj]: tir.bind(vi, i) tir.bind(vj, j) C[vi, vj] = B[vi, vj]", "+ vj, B[vi, vj] + 1.0) C[vi, vj] = tir.load(\"float32\",", "128], \"C\") as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data,", "sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_under_loop(): sch", "one # or more contributor license agreements. 
See the NOTICE", "0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data, vi * 128 + vj, B[vi,", "sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_as_dce():", "tir.bind(vi, i) tir.bind(vj, j) B[vi, vj] = A[vi, vj] *", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "= A[vi, vj] * 2.0 + 1.0 @tvm.script.tir def fail_multi_reader_writer(a:", "(128, 128)) B = tir.alloc_buffer((128, 128)) C = tir.alloc_buffer((128, 128))", "as [vi, vj]: tir.where(B[i, j] < 10.0) C[vi, vj] =", "debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"])", "B = tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128, 128)) D", "block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") block_d = sch.get_block(\"D\") sch.compute_inline(block_b)", "pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b =", "tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128,", "= tir.Schedule(elementwise_predicate, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def", "vj + 2] @tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle) ->", "sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_under_loop(): sch", "tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined,", "sch.compute_inline(block_b) 
tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_multi_consumer(): sch", "tvm.ir.assert_structural_equal(elementwise_standalone_dce, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_multi_consumer(): sch =", "= tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) D", "except in compliance # with the License. You may obtain", "[vi, vj]: C[vi, vj] = B[vi, vj] + 1.0 with", "j] < 10.0) C[vi, vj] = B[vi, vj] + 1.0", "c: ty.handle, d: ty.handle) -> None: A = tir.match_buffer(a, (128,", "as [vi, vj]: C[vi, vj] = B[vi, vj] + B[vi,", "= tir.Schedule(elementwise_standalone, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b)", "[vi, vj]: C[vi, vj] = A[vi, vj] * 2.0 +", "tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b =", "NOTICE file # distributed with this work for additional information", "sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c = sch.get_block(\"C\")", "vj] = A[vi, vj] + 2.0 with tir.block([128, 128], \"C\")", "@tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle) -> None: A =", "this file except in compliance # with the License. 
You", "vj]: C[vi, vj] = A[vi, vj] + 1.0 @tvm.script.tir def", "2.0 for j in tir.serial(0, 128): with tir.block([128, 128], \"C\")", "sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone,", "def test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b = sch.get_block(\"B\") block_c", "= B[vi, vj] + C[vi, vj] @tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle,", "B[vi, vj] + 1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle,", "* 2.0 + 1.0 with tir.block([128, 128], \"D\") as [vi,", "tir.alloc_buffer((128, 128)) for i in tir.serial(0, 128): for j in", "-> None: A = tir.match_buffer(a, (128, 128)) B = tir.alloc_buffer((128,", "test_buffer_matched(): sch = tir.Schedule(buffer_matched, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError):", "+ 1.0 with tir.block([128, 128], \"D\") as [vi, vj]: D[vi,", "pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate, debug_mode=True) block_b =", "license agreements. 
See the NOTICE file # distributed with this", "j] * 2.0 < 10.0) C[vi, vj] = A[vi, vj]", "vj] + 1.0 @tvm.script.tir def elementwise_inlined(a: ty.handle, c: ty.handle) ->", "required by applicable law or agreed to in writing, #", "@tvm.script.tir def elementwise_standalone(a: ty.handle, c: ty.handle) -> None: A =", "vj] = A[vi, vj] + 1.0 @tvm.script.tir def elementwise_under_loop(a: ty.handle,", "+ 2.0 + C[vi, vj] # D has two producers", "for i, j in tir.grid(128, 128): with tir.block([128, 128], \"C\")", "(128, 128)) with tir.block([128, 128], \"B\") as [vi, vj]: B[vi,", "= sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads,", "128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"B\")", "the License for the # specific language governing permissions and", "for j in tir.serial(0, 128): with tir.block([128, 128], \"B\") as", "128], \"C\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j) C[vi,", "permissions and limitations # under the License. # pylint: disable=missing-function-docstring,missing-module-docstring", "C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir def elementwise_multi_producer_consumer(a:", "vj] * 2.0 + 1.0) * (A[vi, vj] * 2.0", "vj] = tir.load(\"float32\", B.data, vi * 128 + vj) +", "vj, B[vi, vj] + 1.0) C[vi, vj] = tir.load(\"float32\", B.data,", "vj] + B[vi, vj + 1] + B[vi, vj +", "def test_reverse_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c", "ANY # KIND, either express or implied. 
See the License", "debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch", "vj] = A[vi, vj] * 2.0 # B has two", "vj]: tir.bind(vi, i) tir.bind(vj, j) B[vi, vj] = A[vi, vj]", "the License is distributed on an # \"AS IS\" BASIS,", "tir.block([128, 126], \"C\") as [vi, vj]: C[vi, vj] = A[vi,", "sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer,", "def test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b = sch.get_block(\"B\")", "\"C\" def test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b = sch.get_block(\"B\")", "2.0 + A[vi, vj + 1] * 2.0 + A[vi,", "= sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__ == \"__main__\": test_compute_inline_elementwise()", "j in tir.grid(128, 128): with tir.block([128, 128], \"C\") as [vi,", "vj] = Bb[0, 0] + 1.0 @tvm.script.tir def elementwise_predicate(a: ty.handle,", "128], \"B\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j) B[vi,", "sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate, debug_mode=True)", "block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_opaque_access_store(): sch =", "vi * 16 + vj) + 1.0 @tvm.script.tir def buffer_matched(a:", "\"D\") as [vi, vj]: D[vi, vj] = B[vi, vj] +", "tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") block_d =", "\"B\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j) 
B[vi, vj]", "[vi, vj]: C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir", "pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_opaque_access_store(): sch = tir.Schedule(opaque_access_store, debug_mode=True) block_b =", "not use this file except in compliance # with the", "D[vi, vj] = B[vi, vj] + 2.0 + C[vi, vj]", "vj] = A[vi, vj] * 2.0 + 2.0 + C[vi,", "C[vi, vj] # D has two producers @tvm.script.tir def elementwise_multi_consumer_inlined(a:", "= tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def", "+ B[vi, vj + 1] + B[vi, vj + 2]", "== \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce()", "== \"C\" def test_compute_inline_as_dce(): sch = tir.Schedule(elementwise_standalone, debug_mode=True) block_b =", "vj]: C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir def", "C[vi, vj] = A[vi, vj] * 2.0 + A[vi, vj", "= sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer,", "\"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop() test_compute_inline_as_dce() test_compute_inline_multi_consumer() test_compute_inline_fail_multi_writer() test_reverse_compute_inline_elementwise() test_reverse_compute_inline_under_loop() test_reverse_compute_inline_fail_as_dce() test_reverse_compute_inline_fail_multi_producer()", "Unless required by applicable law or agreed to in writing,", "C[vi, vj] = A[vi, vj] * 2.0 + 1.0 with", "C[vi, vj] = A[vi, vj] + 1.0 @tvm.script.tir def elementwise_standalone_dce(a:", "sch.get_block(\"C\") with 
pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True)", "[vi, vj]: D[vi, vj] = A[vi, vj] * 2.0 +", "1.0 @tvm.script.tir def elementwise_predicate(a: ty.handle, c: ty.handle) -> None: A", "= tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b)", "tir.alloc_buffer((128, 128)) D = tir.match_buffer(d, (128, 128)) with tir.block([128, 128],", "vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data, vi * 128 +", "def test_compute_inline_predicate(): sch = tir.Schedule(elementwise_predicate, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b)", "vj]: D[vi, vj] = B[vi, vj] + 2.0 + C[vi,", "+ 1.0 @tvm.script.tir def elementwise_inlined(a: ty.handle, c: ty.handle) -> None:", "tir.alloc_buffer((128, 128)) C = tir.match_buffer(c, (128, 128)) D = tir.match_buffer(d,", "= (B[vi, vj] + 1.0) * (B[vi, vj] * 2.0)", "(B[vi, vj] * 2.0) + 3.0 @tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle,", "= A[vi, vj] + 1.0 @tvm.script.tir def elementwise_standalone_dce(a: ty.handle, c:", "(ASF) under one # or more contributor license agreements. 
See", "j) C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir def", "sch.compute_inline(block_b) def test_opaque_access_store(): sch = tir.Schedule(opaque_access_store, debug_mode=True) block_b = sch.get_block(\"B\")", "= sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def", "+ vj) + 1.0 @tvm.script.tir def opaque_access_store(a: ty.handle, c: ty.handle)", "tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c =", "# or more contributor license agreements. See the NOTICE file", "tir.block([128, 128], \"B\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j)", "agreed to in writing, # software distributed under the License", "A[vi, vj] * 2.0 + 1.0 @tvm.script.tir def elementwise_multi_loads(a: ty.handle,", "* 2.0 + A[vi, vj + 2] * 2.0 #", "= B[vi, vj] + 1.0 with tir.block([128, 128], \"D\") as", "= sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_opaque_access_store(): sch = tir.Schedule(opaque_access_store,", "sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b = sch.get_block(\"B\")", "B = tir.alloc_buffer((128, 128)) C = tir.alloc_buffer((128, 128)) D =", "consumers with tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj]", "from tvm.script import ty # pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a:", "\"C\") as [vi, vj]: C[vi, vj] = (B[vi, vj] +", "1] + B[vi, vj + 2] @tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle,", "1] * 2.0 + A[vi, vj + 2] * 2.0", "block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate(): sch =", "= 
tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"C\") as [vi,", "2.0 C[vi, vj] = A[vi, vj] + 2.0 with tir.block([128,", "tir.match_buffer(d, (128, 128)) with tir.block([128, 128], \"C\") as [vi, vj]:", "* 2.0 with tir.block([128, 128], \"C\") as [vi, vj]: Bb", "vj] = A[vi, vj] * 2.0 + 1.0 with tir.block([128,", "2.0 + 1.0) * (A[vi, vj] * 2.0 * 2.0)", "= A[vi, vj] + 1.0 @tvm.script.tir def elementwise_under_loop(a: ty.handle, c:", "2.0 with tir.block([128, 128], \"C\") as [vi, vj]: tir.reads(B[0:128, 0:128])", "assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_under_loop(): sch = tir.Schedule(elementwise_under_loop, debug_mode=True)", "pylint: disable=missing-function-docstring,missing-module-docstring import pytest import tvm from tvm import tir", "sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" assert sch.get(block_d).name_hint == \"D\" def", "2.0 with tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj]", "tir.Schedule(elementwise_predicate, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads():", "sch.get(block_c).name_hint == \"C\" assert sch.get(block_d).name_hint == \"D\" def test_compute_inline_fail_multi_writer(): sch", "tir.block([128, 126], \"C\") as [vi, vj]: C[vi, vj] = B[vi,", "def test_reverse_compute_inline_fail_multi_reader(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True) block_c = sch.get_block(\"C\") with", "test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError):", "tir.store(C.data, vi * 128 + vj, B[vi, vj] + 1.0)", "debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch", "sch = tir.Schedule(elementwise_multi_loads, 
debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c)", "= tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") block_d", "(the # \"License\"); you may not use this file except", "tir.where(A[i, j] * 2.0 < 10.0) C[vi, vj] = A[vi,", "3.0 @tvm.script.tir def opaque_access_load(a: ty.handle, c: ty.handle) -> None: A", "A[vi, vj] * 2.0 + 1.0 with tir.block([128, 128], \"D\")", "sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch = tir.Schedule(opaque_access_load, debug_mode=True)", "vj] * 2.0 for i, j in tir.grid(128, 128): with", "tir.Schedule(buffer_matched, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate():", "def elementwise_standalone(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "ASF licenses this file # to you under the Apache", "* 2.0 * 2.0) + 3.0 @tvm.script.tir def opaque_access_load(a: ty.handle,", "tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads():", "as [vi, vj]: D[vi, vj] = B[vi, vj] + 2.0", "in tir.serial(0, 128): with tir.block([128, 128], \"B\") as [vi, vj]:", "fail_multi_reader_writer(a: ty.handle, d: ty.handle) -> None: A = tir.match_buffer(a, (128,", "128], \"C\") as [vi, vj]: tir.where(B[i, j] < 10.0) C[vi,", "with tir.block([128, 126], \"C\") as [vi, vj]: C[vi, vj] =", "(128, 128)) C = tir.match_buffer(c, (128, 128)) with tir.block([128, 126],", "+ 2] * 2.0 # pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch", "on an # \"AS IS\" BASIS, 
WITHOUT WARRANTIES OR CONDITIONS", "tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) for i,", "tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint == \"C\" assert sch.get(block_d).name_hint == \"D\"", "= sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise,", "* 2.0 C[vi, vj] = A[vi, vj] + 2.0 with", "ownership. The ASF licenses this file # to you under", "test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d = sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError):", "debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_compute_inline_predicate(): sch", "128): with tir.block([128, 128], \"C\") as [vi, vj]: tir.where(B[i, j]", "= sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint", "as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j) B[vi, vj] =", "= sch.get_block(\"C\") block_d = sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined, sch.mod[\"main\"]) assert sch.get(block_c).name_hint", "C[vi, vj] = B[vi, vj] + 1.0 @tvm.script.tir def elementwise_predicate_inlined(a:", "* 2.0 # pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise(): sch = tir.Schedule(elementwise,", "= A[vi, vj] * 2.0 + 1.0 with tir.block([128, 128],", "tvm import tir from tvm.script import ty # pylint: disable=no-member,invalid-name,unused-variable", "sch = tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\")", 
"test_reverse_compute_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined,", "pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load(): sch = tir.Schedule(opaque_access_load, debug_mode=True) block_b =", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a: ty.handle, c: ty.handle) -> None: A", "in tir.serial(0, 128): for j in tir.serial(0, 128): with tir.block([128,", "elementwise_predicate_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_under_loop(): sch =", "tir.match_buffer(c, (128, 128)) D = tir.match_buffer(d, (128, 128)) with tir.block([128,", "128)) B = tir.alloc_buffer((128, 128)) for i in tir.serial(0, 128):", "with the License. 
You may obtain a copy of the", "+ 1.0) C[vi, vj] = tir.load(\"float32\", B.data, vi * 16", "block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert", "\"C\") as [vi, vj]: tir.where(B[i, j] < 10.0) C[vi, vj]", "1.0) * (A[vi, vj] * 2.0 * 2.0) + 3.0", "def test_opaque_access_load(): sch = tir.Schedule(opaque_access_load, debug_mode=True) block_b = sch.get_block(\"B\") with", "2] @tvm.script.tir def elementwise_multi_loads_inlined(a: ty.handle, c: ty.handle) -> None: A", "applicable law or agreed to in writing, # software distributed", "tir.block([128, 128], \"C\") as [vi, vj]: tir.where(B[i, j] < 10.0)", "= sch.get_block(\"B\") block_c = sch.get_block(\"C\") block_d = sch.get_block(\"D\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_consumer_inlined,", "vj) + 1.0 @tvm.script.tir def buffer_matched(a: ty.handle, c: ty.handle) ->", "sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_fail_as_dce(): sch", "# pylint: disable=missing-function-docstring,missing-module-docstring import pytest import tvm from tvm import", "vj]: Bb = tir.match_buffer(B[vi : vi + 1, vj], (1,", "vj]: C[vi, vj] = B[vi, vj] + B[vi, vj +", "as [vi, vj]: B[vi, vj] = A[vi, vj] * 2.0", "tir.Schedule(opaque_access_load, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_opaque_access_store():", "= A[vi, vj] * 2.0 C[vi, vj] = A[vi, vj]", "[vi, vj]: D[vi, vj] = B[vi, vj] + C[vi, vj]", "vj], (1, 1)) C[vi, vj] = Bb[0, 0] + 1.0", "= tir.Schedule(elementwise_multi_reverse_loads, debug_mode=True) block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def", 
"+ 1.0 @tvm.script.tir def elementwise_standalone_dce(a: ty.handle, c: ty.handle) -> None:", "block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch =", "-> None: A = tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c,", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "A[vi, vj] + 2.0 with tir.block([128, 128], \"C\") as [vi,", "@tvm.script.tir def opaque_access_store(a: ty.handle, c: ty.handle) -> None: A =", "file # to you under the Apache License, Version 2.0", "vj + 2] * 2.0 # pylint: enable=no-member,invalid-name,unused-variable def test_compute_inline_elementwise():", "# with the License. You may obtain a copy of", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "A[vi, vj] * 2.0 for i, j in tir.grid(128, 128):", "B[vi, vj + 1] + B[vi, vj + 2] @tvm.script.tir", "for j in tir.serial(0, 128): with tir.block([128, 128], \"C\") as", "language governing permissions and limitations # under the License. 
#", "tir.match_buffer(c, (128, 128)) with tir.block([128, 126], \"C\") as [vi, vj]:", "tir.grid(128, 128): with tir.block([128, 128], \"C\") as [vi, vj]: tir.where(A[i,", "block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\"", "assert sch.get(block_c).name_hint == \"C\" def test_compute_inline_multi_consumer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True)", "sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\")", "software distributed under the License is distributed on an #", "Licensed to the Apache Software Foundation (ASF) under one #", "(A[vi, vj] * 2.0 + 1.0) * (A[vi, vj] *", "tir.Schedule(opaque_access_store, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_buffer_matched():", "= B[vi, vj] + 1.0 @tvm.script.tir def elementwise_predicate_inlined(a: ty.handle, c:", "sch = tir.Schedule(elementwise_multi_loads, debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"])", "def elementwise_multi_loads(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "with tir.block([128, 128], \"D\") as [vi, vj]: D[vi, vj] =", "c: ty.handle) -> None: A = tir.match_buffer(a, (128, 128)) C", "tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"C\") as [vi, vj]:", "with tir.block([128, 128], \"C\") as [vi, vj]: Bb = tir.match_buffer(B[vi", "def test_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c", "under one # or more contributor license agreements. See the", "License. 
# pylint: disable=missing-function-docstring,missing-module-docstring import pytest import tvm from tvm", "\"C\") as [vi, vj]: C[vi, vj] = A[vi, vj] +", "sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_fail_as_dce():", "sch.mod[\"main\"]) assert sch.get(block_b).name_hint == \"B\" def test_reverse_compute_inline_fail_as_dce(): sch = tir.Schedule(elementwise_standalone,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "vj] # D has two producers @tvm.script.tir def elementwise_multi_consumer_inlined(a: ty.handle,", "@tvm.script.tir def elementwise_standalone_dce(a: ty.handle, c: ty.handle) -> None: A =", "i in tir.serial(0, 128): for j in tir.serial(0, 128): with", "1.0 @tvm.script.tir def fail_multi_reader_writer(a: ty.handle, d: ty.handle) -> None: A", "tir.Schedule(elementwise_under_loop, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_inlined,", "debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_inlined, sch.mod[\"main\"])", "= B[vi, vj] + 1.0 @tvm.script.tir def elementwise_inlined(a: ty.handle, c:", "sch.compute_inline(block_b) def test_buffer_matched(): sch = tir.Schedule(buffer_matched, debug_mode=True) block_b = sch.get_block(\"B\")", "elementwise_predicate(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "information # regarding copyright ownership. 
The ASF licenses this file", "C = tir.match_buffer(c, (128, 128)) with tir.block([128, 128], \"C\") as", "128): with tir.block([128, 128], \"B\") as [vi, vj]: tir.bind(vi, i)", "(128, 128)) C = tir.match_buffer(c, (128, 128)) for i, j", "the Apache License, Version 2.0 (the # \"License\"); you may", "sch.reverse_compute_inline(block_b) def test_reverse_compute_inline_fail_multi_producer(): sch = tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d = sch.get_block(\"D\")", "with tir.block([128, 128], \"C\") as [vi, vj]: tir.where(B[i, j] <", "[vi, vj]: tir.where(A[i, j] * 2.0 < 10.0) C[vi, vj]", "debug_mode=True) block_b = sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_predicate_inlined, sch.mod[\"main\"]) def test_compute_inline_multi_loads(): sch", "+ 1.0 @tvm.script.tir def fail_multi_reader_writer(a: ty.handle, d: ty.handle) -> None:", "def elementwise_multi_consumer_inlined(a: ty.handle, c: ty.handle, d: ty.handle) -> None: A", "def elementwise_predicate(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "def elementwise_multi_reverse_loads(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "= tir.Schedule(elementwise_multi_producer_consumer, debug_mode=True) block_d = sch.get_block(\"D\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_d) def", "A = tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128))", "vj] = A[vi, vj] * 2.0 C[vi, vj] = A[vi,", "128)) with tir.block([128, 128], \"B\") as [vi, vj]: B[vi, vj]", "C[vi, vj] @tvm.script.tir def elementwise_standalone(a: ty.handle, c: ty.handle) -> None:", "(B[vi, vj] + 1.0) * (B[vi, vj] * 2.0) +", "you under the Apache License, Version 2.0 (the # \"License\");", "vj + 1] + B[vi, vj + 2] @tvm.script.tir def", "1.0 @tvm.script.tir def elementwise_predicate_inlined(a: ty.handle, c: ty.handle) -> None: A", "# KIND, either express or implied. 
See the License for", "tir.Schedule(elementwise_multi_loads, debug_mode=True) block_c = sch.get_block(\"C\") with pytest.raises(tvm.tir.ScheduleError): sch.reverse_compute_inline(block_c) def test_opaque_access_load():", "+ C[vi, vj] @tvm.script.tir def elementwise_standalone(a: ty.handle, c: ty.handle) ->", "A[vi, vj] * 2.0 + 1.0 @tvm.script.tir def fail_multi_reader_writer(a: ty.handle,", "sch = tir.Schedule(opaque_access_store, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b)", "== \"C\" assert sch.get(block_d).name_hint == \"D\" def test_compute_inline_fail_multi_writer(): sch =", "import ty # pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a: ty.handle, c:", "def test_opaque_access_store(): sch = tir.Schedule(opaque_access_store, debug_mode=True) block_b = sch.get_block(\"B\") with", "sch.get_block(\"B\") sch.compute_inline(block_b) tvm.ir.assert_structural_equal(elementwise_multi_loads_inlined, sch.mod[\"main\"]) if __name__ == \"__main__\": test_compute_inline_elementwise() test_compute_inline_under_loop()", "A[vi, vj] + 1.0 @tvm.script.tir def elementwise_standalone_dce(a: ty.handle, c: ty.handle)", "+ 1.0 @tvm.script.tir def elementwise_under_loop(a: ty.handle, c: ty.handle) -> None:", "agreements. 
See the NOTICE file # distributed with this work", "\"C\") as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j) C[vi, vj]", "def fail_multi_reader_writer(a: ty.handle, d: ty.handle) -> None: A = tir.match_buffer(a,", "test_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c =", "as [vi, vj]: tir.reads(B[0:128, 0:128]) tir.writes(C[0:128, 0:128]) tir.store(C.data, vi *", "C[vi, vj] = tir.load(\"float32\", B.data, vi * 128 + vj)", "vj] * 2.0 # B has two consumers with tir.block([128,", "licenses this file # to you under the Apache License,", "with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b) def test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b", "elementwise_standalone(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "elementwise(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "1, vj], (1, 1)) C[vi, vj] = Bb[0, 0] +", "@tvm.script.tir def elementwise_multi_loads(a: ty.handle, c: ty.handle) -> None: A =", "@tvm.script.tir def elementwise_multi_reverse_loads_inlined(a: ty.handle, c: ty.handle) -> None: A =", "by applicable law or agreed to in writing, # software", "# Unless required by applicable law or agreed to in", "with tir.block([128, 128], \"C\") as [vi, vj]: C[vi, vj] =", "@tvm.script.tir def elementwise_inlined(a: ty.handle, c: ty.handle) -> None: A =", "for i in tir.serial(0, 128): for j in tir.serial(0, 128):", "tir.block([128, 128], \"C\") as [vi, vj]: D[vi, vj] = B[vi,", "# under the License. 
# pylint: disable=missing-function-docstring,missing-module-docstring import pytest import", "= tir.match_buffer(a, (128, 128)) C = tir.match_buffer(c, (128, 128)) B", "test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c =", "@tvm.script.tir def elementwise_multi_producer_consumer(a: ty.handle, c: ty.handle, d: ty.handle) -> None:", "\"D\" def test_compute_inline_fail_multi_writer(): sch = tir.Schedule(fail_multi_reader_writer, debug_mode=True, error_render_level=\"detail\") block_b =", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "1.0 @tvm.script.tir def elementwise_multi_loads(a: ty.handle, c: ty.handle) -> None: A", "(128, 128)) with tir.block([128, 126], \"C\") as [vi, vj]: C[vi,", "vj] = B[vi, vj] + 1.0 with tir.block([128, 128], \"D\")", "under the License. # pylint: disable=missing-function-docstring,missing-module-docstring import pytest import tvm", "License. You may obtain a copy of the License at", "as [vi, vj]: tir.bind(vi, i) tir.bind(vj, j) C[vi, vj] =", "sch = tir.Schedule(opaque_access_load, debug_mode=True) block_b = sch.get_block(\"B\") with pytest.raises(tvm.tir.ScheduleError): sch.compute_inline(block_b)", "+ A[vi, vj + 2] * 2.0 # pylint: enable=no-member,invalid-name,unused-variable", "You may obtain a copy of the License at #", "vj) + 1.0 @tvm.script.tir def opaque_access_store(a: ty.handle, c: ty.handle) ->", "1.0 @tvm.script.tir def elementwise_standalone_dce(a: ty.handle, c: ty.handle) -> None: A", "= tir.match_buffer(c, (128, 128)) B = tir.alloc_buffer((128, 128)) for i", "[vi, vj]: C[vi, vj] = (B[vi, vj] + 1.0) *", "* 16 + vj) + 1.0 @tvm.script.tir def buffer_matched(a: ty.handle,", "opaque_access_store(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a, (128,", "vj] = B[vi, vj] + 1.0 @tvm.script.tir def elementwise_predicate_inlined(a: ty.handle,", "as [vi, vj]: C[vi, vj] = A[vi, vj] + 1.0", "j in tir.serial(0, 128): with 
tir.block([128, 128], \"B\") as [vi,", "def test_reverse_compute_inline_elementwise(): sch = tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c", "\"C\") as [vi, vj]: C[vi, vj] = A[vi, vj] *", "compliance # with the License. You may obtain a copy", "= tir.Schedule(elementwise, debug_mode=True) block_b = sch.get_block(\"B\") block_c = sch.get_block(\"C\") sch.compute_inline(block_b)", "B.data, vi * 16 + vj) + 1.0 @tvm.script.tir def", "* 2.0 with tir.block([128, 128], \"C\") as [vi, vj]: C[vi,", "= B[vi, vj] + B[vi, vj + 1] + B[vi,", "def elementwise_inlined(a: ty.handle, c: ty.handle) -> None: A = tir.match_buffer(a,", "B[vi, vj] + C[vi, vj] @tvm.script.tir def elementwise_multi_reverse_loads(a: ty.handle, c:", "tvm.script import ty # pylint: disable=no-member,invalid-name,unused-variable @tvm.script.tir def elementwise(a: ty.handle,", "sch.get_block(\"C\") sch.reverse_compute_inline(block_c) tvm.ir.assert_structural_equal(elementwise_multi_reverse_loads_inlined, sch.mod[\"main\"]) def test_reverse_compute_fail_multi_reverse_loads(): sch = tir.Schedule(elementwise_multi_loads, debug_mode=True)" ]
[ "request, redirect import subprocess import tempfile import json import yaml", "\"%sjobs/%i\" % (request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\" % (request.url_root, self.jobid), \"run\":", "= self.proc.communicate(self.inputobj) if self.proc.returncode == 0: outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader)", "def begin(self): loghandle, self.logname = tempfile.mkstemp() with self.updatelock: self.outdir =", "self.status[\"state\"] = \"Paused\" def resume(self): if self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT)", "yield r else: with job.updatelock: if job.status[\"state\"] != \"Running\": break", "yaml import signal import threading import time import copy app", "self.status = { \"id\": \"%sjobs/%i\" % (request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\"", "if request.method == 'POST': action = request.args.get(\"action\") if action: if", "\"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"] = \"Paused\" def resume(self): if", "len(jobs) job = Job(jobid, path, request.stream.read()) job.start() jobs.append(job) return redirect(\"/jobs/%i\"", "jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def jobcontrol(jobid): with jobs_lock: job", "False else: yield \", \" + json.dumps(j.getstatus(), indent=4) yield \"]\"", "import copy app = Flask(__name__) jobs_lock = threading.Lock() jobs =", "self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"] = \"Canceled\" def pause(self): if self.status[\"state\"]", "\"Canceled\" def pause(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock:", "== \"pause\": job.pause() elif action == \"resume\": job.resume() status =", "True: r = f.read(4096) if r: yield r else: with", "yield \", \" + json.dumps(j.getstatus(), indent=4) yield \"]\" return Response(spool(jobscopy))", "threading.Lock() self.begin() def begin(self): loghandle, 
self.logname = tempfile.mkstemp() with self.updatelock:", "self.logname = tempfile.mkstemp() with self.updatelock: self.outdir = tempfile.mkdtemp() self.proc =", "def __init__(self, jobid, path, inputobj): super(Job, self).__init__() self.jobid = jobid", "import json import yaml import signal import threading import time", "self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj) if self.proc.returncode == 0: outobj =", "else: with job.updatelock: if job.status[\"state\"] != \"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\",", "jobs[jobid] if request.method == 'POST': action = request.args.get(\"action\") if action:", "if self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"] = \"Running\"", "self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir) self.status = {", "job.updatelock: if job.status[\"state\"] != \"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def", "None} def run(self): self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj) if self.proc.returncode ==", "= { \"id\": \"%sjobs/%i\" % (request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\" %", "from flask import Flask, Response, request, redirect import subprocess import", "self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"] = \"Paused\" def", "jobscopy = copy.copy(jobs) def spool(jc): yield \"[\" first = True", "yield \"[\" first = True for j in jc: if", "resume(self): if self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"] =", "with jobs_lock: job = jobs[jobid] if request.method == 'POST': action", "jobid self.path = path self.inputobj = inputobj self.updatelock = threading.Lock()", "methods=['GET', 'POST']) def jobcontrol(jobid): with jobs_lock: job 
= jobs[jobid] if", "self.status[\"state\"] = \"Success\" self.status[\"output\"] = outobj else: with self.updatelock: self.status[\"state\"]", "= len(jobs) job = Job(jobid, path, request.stream.read()) job.start() jobs.append(job) return", "= tempfile.mkstemp() with self.updatelock: self.outdir = tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\",", "\"]\" return Response(spool(jobscopy)) if __name__ == \"__main__\": # app.debug =", "with self.updatelock: self.status[\"state\"] = \"Paused\" def resume(self): if self.status[\"state\"] ==", "jobid, path, inputobj): super(Job, self).__init__() self.jobid = jobid self.path =", "json.dumps(j.getstatus(), indent=4) yield \"]\" return Response(spool(jobscopy)) if __name__ == \"__main__\":", "self.proc.communicate(self.inputobj) if self.proc.returncode == 0: outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with", "= \"Failed\" def getstatus(self): with self.updatelock: return self.status.copy() def cancel(self):", "def getlog(jobid): with jobs_lock: job = jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\",", "class Job(threading.Thread): def __init__(self, jobid, path, inputobj): super(Job, self).__init__() self.jobid", "self.updatelock: self.outdir = tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE,", "status = job.getstatus() return json.dumps(status, indent=4), 200, \"\" def logspooler(job):", "job = jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def getjobs(): with", "self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"] = \"Running\" @app.route(\"/run\",", "= request.args[\"wf\"] with jobs_lock: jobid = len(jobs) job = Job(jobid,", "else: yield \", \" + json.dumps(j.getstatus(), indent=4) yield \"]\" return", "def jobcontrol(jobid): with jobs_lock: job = jobs[jobid] if request.method ==", "if self.status[\"state\"] == 
\"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"] = \"Canceled\"", "job.cancel() elif action == \"pause\": job.pause() elif action == \"resume\":", "path = request.args[\"wf\"] with jobs_lock: jobid = len(jobs) job =", "= subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir) self.status", "\"Running\", \"input\": json.loads(self.inputobj), \"output\": None} def run(self): self.stdoutdata, self.stderrdata =", "jobs = [] class Job(threading.Thread): def __init__(self, jobid, path, inputobj):", "self.updatelock: self.status[\"state\"] = \"Canceled\" def pause(self): if self.status[\"state\"] == \"Running\":", "return self.status.copy() def cancel(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGQUIT) with", "else: with self.updatelock: self.status[\"state\"] = \"Failed\" def getstatus(self): with self.updatelock:", "job.start() jobs.append(job) return redirect(\"/jobs/%i\" % jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST'])", "stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir) self.status = { \"id\": \"%sjobs/%i\" %", "self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"] = \"Paused\" def resume(self): if self.status[\"state\"]", "time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid): with jobs_lock: job = jobs[jobid]", "= \"Canceled\" def pause(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP) with", "job.pause() elif action == \"resume\": job.resume() status = job.getstatus() return", "\"Success\" self.status[\"output\"] = outobj else: with self.updatelock: self.status[\"state\"] = \"Failed\"", "for j in jc: if first: yield json.dumps(j.getstatus(), indent=4) first", "self.jobid), \"log\": \"%sjobs/%i/log\" % (request.url_root, self.jobid), 
\"run\": self.path, \"state\": \"Running\",", "'POST': action = request.args.get(\"action\") if action: if action == \"cancel\":", "request.stream.read()) job.start() jobs.append(job) return redirect(\"/jobs/%i\" % jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET',", "request.args.get(\"action\") if action: if action == \"cancel\": job.cancel() elif action", "jobid = len(jobs) job = Job(jobid, path, request.stream.read()) job.start() jobs.append(job)", "self.status[\"output\"] = outobj else: with self.updatelock: self.status[\"state\"] = \"Failed\" def", "self.status.copy() def cancel(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock:", "tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True,", "self.proc.returncode == 0: outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"]", "first = False else: yield \", \" + json.dumps(j.getstatus(), indent=4)", "self.status[\"state\"] = \"Failed\" def getstatus(self): with self.updatelock: return self.status.copy() def", "with self.updatelock: self.outdir = tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\", self.path, \"-\"],", "\"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"] = \"Canceled\" def pause(self): if", "== 'POST': action = request.args.get(\"action\") if action: if action ==", "open(job.logname, \"r\") as f: while True: r = f.read(4096) if", "def spool(jc): yield \"[\" first = True for j in", "import time import copy app = Flask(__name__) jobs_lock = threading.Lock()", "threading.Lock() jobs = [] class Job(threading.Thread): def __init__(self, jobid, path,", "r: yield r else: with job.updatelock: if job.status[\"state\"] != \"Running\":", "self.updatelock: return self.status.copy() def cancel(self): if self.status[\"state\"] 
== \"Running\": self.proc.send_signal(signal.SIGQUIT)", "\"pause\": job.pause() elif action == \"resume\": job.resume() status = job.getstatus()", "with self.updatelock: return self.status.copy() def cancel(self): if self.status[\"state\"] == \"Running\":", "job = Job(jobid, path, request.stream.read()) job.start() jobs.append(job) return redirect(\"/jobs/%i\" %", "= Flask(__name__) jobs_lock = threading.Lock() jobs = [] class Job(threading.Thread):", "if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"] = \"Paused\"", "\"Paused\" def resume(self): if self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock:", "import yaml import signal import threading import time import copy", "def resume(self): if self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"]", "def pause(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"]", "return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def getjobs(): with jobs_lock: jobscopy =", "= path self.inputobj = inputobj self.updatelock = threading.Lock() self.begin() def", "+ json.dumps(j.getstatus(), indent=4) yield \"]\" return Response(spool(jobscopy)) if __name__ ==", "self.outdir = tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE,", "first: yield json.dumps(j.getstatus(), indent=4) first = False else: yield \",", "indent=4) first = False else: yield \", \" + json.dumps(j.getstatus(),", "== \"resume\": job.resume() status = job.getstatus() return json.dumps(status, indent=4), 200,", "self.updatelock: self.status[\"state\"] = \"Running\" @app.route(\"/run\", methods=['POST']) def runworkflow(): path =", "def logspooler(job): with open(job.logname, \"r\") as f: while True: r", 
"Job(jobid, path, request.stream.read()) job.start() jobs.append(job) return redirect(\"/jobs/%i\" % jobid, code=303)", "!= \"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid): with jobs_lock:", "return Response(spool(jobscopy)) if __name__ == \"__main__\": # app.debug = True", "\"log\": \"%sjobs/%i/log\" % (request.url_root, self.jobid), \"run\": self.path, \"state\": \"Running\", \"input\":", "begin(self): loghandle, self.logname = tempfile.mkstemp() with self.updatelock: self.outdir = tempfile.mkdtemp()", "while True: r = f.read(4096) if r: yield r else:", "= \"Paused\" def resume(self): if self.status[\"state\"] == \"Paused\": self.proc.send_signal(signal.SIGCONT) with", "\"[\" first = True for j in jc: if first:", "jobcontrol(jobid): with jobs_lock: job = jobs[jobid] if request.method == 'POST':", "with jobs_lock: job = jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def", "'POST']) def jobcontrol(jobid): with jobs_lock: job = jobs[jobid] if request.method", "\"output\": None} def run(self): self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj) if self.proc.returncode", "% jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def jobcontrol(jobid): with jobs_lock:", "= request.args.get(\"action\") if action: if action == \"cancel\": job.cancel() elif", "if r: yield r else: with job.updatelock: if job.status[\"state\"] !=", "job.getstatus() return json.dumps(status, indent=4), 200, \"\" def logspooler(job): with open(job.logname,", "flask import Flask, Response, request, redirect import subprocess import tempfile", "threading import time import copy app = Flask(__name__) jobs_lock =", "== 0: outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"] =", "\"state\": \"Running\", \"input\": json.loads(self.inputobj), \"output\": None} def run(self): self.stdoutdata, self.stderrdata", 
"Response, request, redirect import subprocess import tempfile import json import", "with self.updatelock: self.status[\"state\"] = \"Canceled\" def pause(self): if self.status[\"state\"] ==", "self.proc = subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir)", "Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def getjobs(): with jobs_lock: jobscopy = copy.copy(jobs)", "with job.updatelock: if job.status[\"state\"] != \"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET'])", "cwd=self.outdir) self.status = { \"id\": \"%sjobs/%i\" % (request.url_root, self.jobid), \"log\":", "action = request.args.get(\"action\") if action: if action == \"cancel\": job.cancel()", "app = Flask(__name__) jobs_lock = threading.Lock() jobs = [] class", "loghandle, self.logname = tempfile.mkstemp() with self.updatelock: self.outdir = tempfile.mkdtemp() self.proc", "\"%sjobs/%i/log\" % (request.url_root, self.jobid), \"run\": self.path, \"state\": \"Running\", \"input\": json.loads(self.inputobj),", "= [] class Job(threading.Thread): def __init__(self, jobid, path, inputobj): super(Job,", "if job.status[\"state\"] != \"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid):", "\"Failed\" def getstatus(self): with self.updatelock: return self.status.copy() def cancel(self): if", "jobs_lock: jobid = len(jobs) job = Job(jobid, path, request.stream.read()) job.start()", "@app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid): with jobs_lock: job = jobs[jobid] return", "self.path = path self.inputobj = inputobj self.updatelock = threading.Lock() self.begin()", "jobs_lock: jobscopy = copy.copy(jobs) def spool(jc): yield \"[\" first =", "% (request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\" % (request.url_root, self.jobid), \"run\": self.path,", "= threading.Lock() self.begin() 
def begin(self): loghandle, self.logname = tempfile.mkstemp() with", "f: while True: r = f.read(4096) if r: yield r", "first = True for j in jc: if first: yield", "methods=['POST']) def runworkflow(): path = request.args[\"wf\"] with jobs_lock: jobid =", "\"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid): with jobs_lock: job", "stderr=loghandle, close_fds=True, cwd=self.outdir) self.status = { \"id\": \"%sjobs/%i\" % (request.url_root,", "self.jobid), \"run\": self.path, \"state\": \"Running\", \"input\": json.loads(self.inputobj), \"output\": None} def", "self.updatelock: self.status[\"state\"] = \"Paused\" def resume(self): if self.status[\"state\"] == \"Paused\":", "= inputobj self.updatelock = threading.Lock() self.begin() def begin(self): loghandle, self.logname", "super(Job, self).__init__() self.jobid = jobid self.path = path self.inputobj =", "self.path, \"state\": \"Running\", \"input\": json.loads(self.inputobj), \"output\": None} def run(self): self.stdoutdata,", "self.stderrdata = self.proc.communicate(self.inputobj) if self.proc.returncode == 0: outobj = yaml.load(self.stdoutdata,", "= Job(jobid, path, request.stream.read()) job.start() jobs.append(job) return redirect(\"/jobs/%i\" % jobid,", "if action == \"cancel\": job.cancel() elif action == \"pause\": job.pause()", "= jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def getjobs(): with jobs_lock:", "0: outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"] = \"Success\"", "action == \"resume\": job.resume() status = job.getstatus() return json.dumps(status, indent=4),", "inputobj self.updatelock = threading.Lock() self.begin() def begin(self): loghandle, self.logname =", "return json.dumps(status, indent=4), 200, \"\" def logspooler(job): with open(job.logname, \"r\")", "outobj else: with self.updatelock: self.status[\"state\"] = \"Failed\" def 
getstatus(self): with", "= \"Success\" self.status[\"output\"] = outobj else: with self.updatelock: self.status[\"state\"] =", "self.status[\"state\"] = \"Canceled\" def pause(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP)", "\", \" + json.dumps(j.getstatus(), indent=4) yield \"]\" return Response(spool(jobscopy)) if", "\" + json.dumps(j.getstatus(), indent=4) yield \"]\" return Response(spool(jobscopy)) if __name__", "== \"cancel\": job.cancel() elif action == \"pause\": job.pause() elif action", "inputobj): super(Job, self).__init__() self.jobid = jobid self.path = path self.inputobj", "== \"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"] = \"Running\" @app.route(\"/run\", methods=['POST'])", "== \"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"] = \"Paused\" def resume(self):", "import signal import threading import time import copy app =", "= threading.Lock() jobs = [] class Job(threading.Thread): def __init__(self, jobid,", "json.dumps(j.getstatus(), indent=4) first = False else: yield \", \" +", "yield \"]\" return Response(spool(jobscopy)) if __name__ == \"__main__\": # app.debug", "= \"Running\" @app.route(\"/run\", methods=['POST']) def runworkflow(): path = request.args[\"wf\"] with", "def run(self): self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj) if self.proc.returncode == 0:", "redirect(\"/jobs/%i\" % jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def jobcontrol(jobid): with", "f.read(4096) if r: yield r else: with job.updatelock: if job.status[\"state\"]", "% (request.url_root, self.jobid), \"run\": self.path, \"state\": \"Running\", \"input\": json.loads(self.inputobj), \"output\":", "cancel(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"] =", "self).__init__() self.jobid = jobid self.path 
= path self.inputobj = inputobj", "import tempfile import json import yaml import signal import threading", "jobs_lock = threading.Lock() jobs = [] class Job(threading.Thread): def __init__(self,", "subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir) self.status =", "(request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\" % (request.url_root, self.jobid), \"run\": self.path, \"state\":", "\"\" def logspooler(job): with open(job.logname, \"r\") as f: while True:", "path, inputobj): super(Job, self).__init__() self.jobid = jobid self.path = path", "= outobj else: with self.updatelock: self.status[\"state\"] = \"Failed\" def getstatus(self):", "path, request.stream.read()) job.start() jobs.append(job) return redirect(\"/jobs/%i\" % jobid, code=303) @app.route(\"/jobs/<int:jobid>\",", "with self.updatelock: self.status[\"state\"] = \"Failed\" def getstatus(self): with self.updatelock: return", "in jc: if first: yield json.dumps(j.getstatus(), indent=4) first = False", "copy.copy(jobs) def spool(jc): yield \"[\" first = True for j", "if self.proc.returncode == 0: outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock:", "copy app = Flask(__name__) jobs_lock = threading.Lock() jobs = []", "indent=4) yield \"]\" return Response(spool(jobscopy)) if __name__ == \"__main__\": #", "@app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def jobcontrol(jobid): with jobs_lock: job = jobs[jobid]", "def getstatus(self): with self.updatelock: return self.status.copy() def cancel(self): if self.status[\"state\"]", "with jobs_lock: jobid = len(jobs) job = Job(jobid, path, request.stream.read())", "= yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"] = \"Success\" self.status[\"output\"] =", "as f: while True: r = f.read(4096) if r: yield", "with self.updatelock: self.status[\"state\"] = \"Running\" 
@app.route(\"/run\", methods=['POST']) def runworkflow(): path", "def cancel(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"]", "with self.updatelock: self.status[\"state\"] = \"Success\" self.status[\"output\"] = outobj else: with", "self.status[\"state\"] = \"Running\" @app.route(\"/run\", methods=['POST']) def runworkflow(): path = request.args[\"wf\"]", "{ \"id\": \"%sjobs/%i\" % (request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\" % (request.url_root,", "action == \"cancel\": job.cancel() elif action == \"pause\": job.pause() elif", "True for j in jc: if first: yield json.dumps(j.getstatus(), indent=4)", "jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def getjobs(): with jobs_lock: jobscopy", "json.loads(self.inputobj), \"output\": None} def run(self): self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj) if", "(request.url_root, self.jobid), \"run\": self.path, \"state\": \"Running\", \"input\": json.loads(self.inputobj), \"output\": None}", "Flask, Response, request, redirect import subprocess import tempfile import json", "import Flask, Response, request, redirect import subprocess import tempfile import", "getjobs(): with jobs_lock: jobscopy = copy.copy(jobs) def spool(jc): yield \"[\"", "job.resume() status = job.getstatus() return json.dumps(status, indent=4), 200, \"\" def", "tempfile import json import yaml import signal import threading import", "Response(spool(jobscopy)) if __name__ == \"__main__\": # app.debug = True app.run()", "== \"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"] = \"Canceled\" def pause(self):", "\"Paused\": self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"] = \"Running\" @app.route(\"/run\", methods=['POST']) def", "time import copy app = Flask(__name__) jobs_lock = threading.Lock() jobs", "= True for j in jc: 
if first: yield json.dumps(j.getstatus(),", "\"run\": self.path, \"state\": \"Running\", \"input\": json.loads(self.inputobj), \"output\": None} def run(self):", "\"id\": \"%sjobs/%i\" % (request.url_root, self.jobid), \"log\": \"%sjobs/%i/log\" % (request.url_root, self.jobid),", "indent=4), 200, \"\" def logspooler(job): with open(job.logname, \"r\") as f:", "= jobid self.path = path self.inputobj = inputobj self.updatelock =", "\"r\") as f: while True: r = f.read(4096) if r:", "jobs.append(job) return redirect(\"/jobs/%i\" % jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def", "= copy.copy(jobs) def spool(jc): yield \"[\" first = True for", "Flask(__name__) jobs_lock = threading.Lock() jobs = [] class Job(threading.Thread): def", "break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid): with jobs_lock: job =", "self.inputobj = inputobj self.updatelock = threading.Lock() self.begin() def begin(self): loghandle,", "signal import threading import time import copy app = Flask(__name__)", "\"Running\" @app.route(\"/run\", methods=['POST']) def runworkflow(): path = request.args[\"wf\"] with jobs_lock:", "getlog(jobid): with jobs_lock: job = jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET'])", "path self.inputobj = inputobj self.updatelock = threading.Lock() self.begin() def begin(self):", "= jobs[jobid] if request.method == 'POST': action = request.args.get(\"action\") if", "yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"] = \"Success\" self.status[\"output\"] = outobj", "self.proc.send_signal(signal.SIGCONT) with self.updatelock: self.status[\"state\"] = \"Running\" @app.route(\"/run\", methods=['POST']) def runworkflow():", "= tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\", self.path, \"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle,", "def getjobs(): with jobs_lock: jobscopy = 
copy.copy(jobs) def spool(jc): yield", "return redirect(\"/jobs/%i\" % jobid, code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def jobcontrol(jobid):", "200, \"\" def logspooler(job): with open(job.logname, \"r\") as f: while", "\"input\": json.loads(self.inputobj), \"output\": None} def run(self): self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj)", "Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"] = \"Success\" self.status[\"output\"] = outobj else:", "r else: with job.updatelock: if job.status[\"state\"] != \"Running\": break time.sleep(1)", "[] class Job(threading.Thread): def __init__(self, jobid, path, inputobj): super(Job, self).__init__()", "subprocess import tempfile import json import yaml import signal import", "self.jobid = jobid self.path = path self.inputobj = inputobj self.updatelock", "jobs_lock: job = jobs[jobid] return Response(logspooler(job)) @app.route(\"/jobs\", methods=['GET']) def getjobs():", "tempfile.mkstemp() with self.updatelock: self.outdir = tempfile.mkdtemp() self.proc = subprocess.Popen([\"cwl-runner\", self.path,", "request.method == 'POST': action = request.args.get(\"action\") if action: if action", "pause(self): if self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGTSTP) with self.updatelock: self.status[\"state\"] =", "job = jobs[jobid] if request.method == 'POST': action = request.args.get(\"action\")", "jobs_lock: job = jobs[jobid] if request.method == 'POST': action =", "if action: if action == \"cancel\": job.cancel() elif action ==", "self.status[\"state\"] == \"Running\": self.proc.send_signal(signal.SIGQUIT) with self.updatelock: self.status[\"state\"] = \"Canceled\" def", "logspooler(job): with open(job.logname, \"r\") as f: while True: r =", "methods=['GET']) def getjobs(): with jobs_lock: jobscopy = copy.copy(jobs) def spool(jc):", "request.args[\"wf\"] with jobs_lock: jobid = len(jobs) job = Job(jobid, path,", "@app.route(\"/run\", 
methods=['POST']) def runworkflow(): path = request.args[\"wf\"] with jobs_lock: jobid", "code=303) @app.route(\"/jobs/<int:jobid>\", methods=['GET', 'POST']) def jobcontrol(jobid): with jobs_lock: job =", "runworkflow(): path = request.args[\"wf\"] with jobs_lock: jobid = len(jobs) job", "self.begin() def begin(self): loghandle, self.logname = tempfile.mkstemp() with self.updatelock: self.outdir", "import subprocess import tempfile import json import yaml import signal", "r = f.read(4096) if r: yield r else: with job.updatelock:", "spool(jc): yield \"[\" first = True for j in jc:", "if first: yield json.dumps(j.getstatus(), indent=4) first = False else: yield", "run(self): self.stdoutdata, self.stderrdata = self.proc.communicate(self.inputobj) if self.proc.returncode == 0: outobj", "\"resume\": job.resume() status = job.getstatus() return json.dumps(status, indent=4), 200, \"\"", "json import yaml import signal import threading import time import", "Job(threading.Thread): def __init__(self, jobid, path, inputobj): super(Job, self).__init__() self.jobid =", "elif action == \"resume\": job.resume() status = job.getstatus() return json.dumps(status,", "job.status[\"state\"] != \"Running\": break time.sleep(1) @app.route(\"/jobs/<int:jobid>/log\", methods=['GET']) def getlog(jobid): with", "json.dumps(status, indent=4), 200, \"\" def logspooler(job): with open(job.logname, \"r\") as", "__init__(self, jobid, path, inputobj): super(Job, self).__init__() self.jobid = jobid self.path", "= job.getstatus() return json.dumps(status, indent=4), 200, \"\" def logspooler(job): with", "self.updatelock: self.status[\"state\"] = \"Success\" self.status[\"output\"] = outobj else: with self.updatelock:", "redirect import subprocess import tempfile import json import yaml import", "yield json.dumps(j.getstatus(), indent=4) first = False else: yield \", \"", "getstatus(self): with self.updatelock: return self.status.copy() def cancel(self): if self.status[\"state\"] ==", 
"methods=['GET']) def getlog(jobid): with jobs_lock: job = jobs[jobid] return Response(logspooler(job))", "with open(job.logname, \"r\") as f: while True: r = f.read(4096)", "j in jc: if first: yield json.dumps(j.getstatus(), indent=4) first =", "self.updatelock = threading.Lock() self.begin() def begin(self): loghandle, self.logname = tempfile.mkstemp()", "@app.route(\"/jobs\", methods=['GET']) def getjobs(): with jobs_lock: jobscopy = copy.copy(jobs) def", "close_fds=True, cwd=self.outdir) self.status = { \"id\": \"%sjobs/%i\" % (request.url_root, self.jobid),", "elif action == \"pause\": job.pause() elif action == \"resume\": job.resume()", "import threading import time import copy app = Flask(__name__) jobs_lock", "def runworkflow(): path = request.args[\"wf\"] with jobs_lock: jobid = len(jobs)", "self.updatelock: self.status[\"state\"] = \"Failed\" def getstatus(self): with self.updatelock: return self.status.copy()", "\"cancel\": job.cancel() elif action == \"pause\": job.pause() elif action ==", "outobj = yaml.load(self.stdoutdata, Loader=yaml.FullLoader) with self.updatelock: self.status[\"state\"] = \"Success\" self.status[\"output\"]", "= False else: yield \", \" + json.dumps(j.getstatus(), indent=4) yield", "action: if action == \"cancel\": job.cancel() elif action == \"pause\":", "with jobs_lock: jobscopy = copy.copy(jobs) def spool(jc): yield \"[\" first", "jc: if first: yield json.dumps(j.getstatus(), indent=4) first = False else:", "= f.read(4096) if r: yield r else: with job.updatelock: if", "stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir) self.status = { \"id\": \"%sjobs/%i\"", "action == \"pause\": job.pause() elif action == \"resume\": job.resume() status", "\"-\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=loghandle, close_fds=True, cwd=self.outdir) self.status = { \"id\":" ]
[ "2: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0100. n&1 =", "side if (i < 31) { // CATCH: for last", "binary as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is for readability), calling reverseBits(13)", "1 therefore result = result + 1 = = 0000_0000_0000_0000_0000_0000_0000_1011", "0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result =", "Apple Airbnb # Related Topics # Bit Manipulation # Similar", "unittest.main() Java = ''' # Thought: # 1ms 100% class", "for result <<=1, i.e. shifting result to the left by", "result + 1 = = 0000_0000_0000_0000_0000_0000_0000_1011 We right shift n", "= 0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 =", "n = 13: Initially, result = 0 = 0000_0000_0000_0000_0000_0000_0000_0000, n", "(an integer has 32 bits). In each iteration: We first", "1 (n >>= 1) At the end of the iteration,", "n=5 (101), n&1 = 101 & 001 = 001 =", "this will result in padding 28 0's to the right", "__source__ = 'https://leetcode.com/problems/reverse-bits/description/' # https://github.com/kamyu104/LeetCode/blob/master/Python/reverse-bits.py # Time : O(n) #", "value public int reverseBits(int n) { n = ((n &", "1; if ((n & 1) == 1) result++; n >>=", "(101), n&1 = 101 & 001 = 001 = 1;", "1 = = 0000_0000_0000_0000_0000_0000_0000_1011 We right shift n by 1", "} # 1ms 100% class Solution { // you need", "given input 43261596 (represented in binary as 00000010100101000001111010011100), # return", "# import unittest class Solution: # @param n, an integer", "i++) { result <<= 1; if ((n & 1) ==", "= 0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop: i = 0: result =", "0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop: i = 0: result = result", "the end, we get result = 1011_0000_0000_0000_0000_0000_0000_0000 This is 
exactly", "Initially, result = 0 = 0000_0000_0000_0000_0000_0000_0000_0000, n = 13 =", "result = 1011_0000_0000_0000_0000_0000_0000_0000 This is exactly what we expected to", "n, an integer # @return an integer def reverseBits(self, n):", "} return result; } } # 1ms 100% class Solution", "= 001 = 1; however, if n = 2 (10),", "to i = 31 iterations left, this will result in", "0's to the right of result. i.e at the end,", "0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore we don't increment result.", "1) result++; n >>= 1; } return result; } }", "(represented in binary as 00111001011110000010100101000000). # # Follow up: #", "i.e at the end, we get result = 1011_0000_0000_0000_0000_0000_0000_0000 This", "1, we add 1 to result. To find the last", "return 0; int result = 0; for (int i =", "Companies # Apple Airbnb # Related Topics # Bit Manipulation", "& 0x00FF00FF) << 8) | ((n & 0xFF00FF00) >>> 8);", "<<= 1; if ((n & 1) == 1) result++; n", "(n >>= 1) At the end of the iteration, we", "0: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1 =", "__name__ == '__main__': unittest.main() Java = ''' # Thought: #", "0000_0000_0000_0000_0000_0000_0000_0001 = result = 0000_0000_0000_0000_0000_0000_0000_0101 We right shift n by", "1 result |= n & 1 n >>= 1 return", "= 0; i < 32; i++) { result <<= 1;", "# return 964176192 (represented in binary as 00111001011110000010100101000000). # #", "This is exactly what we expected to get # 1ms", "= 0 for i in xrange(32): result <<= 1 result", "exactly what we expected to get # 1ms 100% class", "Manipulation # # Description: Leetcode # 190. Reverse Bits #", "# Follow up: # If this function is called many", "in padding 28 0's to the right of result. i.e", "// you need treat n as an unsigned value public", "return ret; } } We first intitialize result to 0.", "964176192 (represented in binary as 00111001011110000010100101000000). 
# # Follow up:", "1 = 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 = result = 0000_0000_0000_0000_0000_0000_0000_0101 We", "= 0. Now, from here to the end of the", "(n&1) will always be 0 and n >>=1 will not", "return 964176192 (represented in binary as 00111001011110000010100101000000). # # Follow", "the iteration, we return result. Example, if input n =", "((n & 0x33333333) << 2) | ((n & 0xCCCCCCCC) >>>", "# Apple Airbnb # Related Topics # Bit Manipulation #", "the next iteration. i = 2: result = result <<", "<< 1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 =", "= 0000_0000_0000_0000_0000_0000_0000_0101 We right shift n by 1 to get:", "((n & 0xAAAAAAAA) >>> 1); n = ((n & 0x33333333)", "We then go to the next iteration. i = 2:", "0. Now, from here to the end of the iteration,", "then go to the next iteration. i = 3: result", "unsigned value public int reverseBits(int n) { if (n ==", "update n by shifting it to the right by 1", "only change will be for result <<=1, i.e. shifting result", "# Reverse bits of a given 32 bits unsigned integer.", "At the end of the iteration, we return result. Example,", "iteration, we return result. Example, if input n = 13", "1) == 1) result++; n >>= 1; } return result;", "0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now, from here to the end of", "the right of result. i.e at the end, we get", "by 1 bit. Then, if the last digit of input", "{ ret |= 1; //same as // res += n", "# Time : O(n) # Space: O(1) # Bit Manipulation", "Topics # Bit Manipulation # Similar Questions # Number of", "i = 0; i < 32; i++) { if ((n", "01 = 0). Finally, we update n by shifting it", "= 101 & 001 = 001 = 1; however, if", "each iteration: We first shift result to the left by", "last digit, don't shift! ret <<= 1; } } return", "+ 1 = = 0000_0000_0000_0000_0000_0000_0000_1011 We right shift n by", "0 and n >>=1 will not change n. 
The only", "to get: n = 0000_0000_0000_0000_0000_0000_0000_0001. We then go to the", "shifting result to the left by 1 digit. Since there", "result; } } # 1ms 100% class Solution { //", "= 13: Initially, result = 0 = 0000_0000_0000_0000_0000_0000_0000_0000, n =", "& 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore we don't increment", "Now, from here to the end of the iteration, n", "integer has 32 bits). In each iteration: We first shift", "shift n by 1 (n >>= 1) to get: n", "example, given input 43261596 (represented in binary as 00000010100101000001111010011100), #", "unsigned integer. # # For example, given input 43261596 (represented", "bit. Then, if the last digit of input n is", "((n & 1) == 1) result++; n >>= 1; }", "loop: i = 0: result = result << 1 =", "n) { int ret = 0; for (int i =", "& 0xF0F0F0F0) >>> 4); n = ((n & 0x00FF00FF) <<", "function is called many times, how would you optimize it?", "i = 3: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_1010.", "1 digit. Since there we have i=4 to i =", "value public int reverseBits(int n) { int ret = 0;", "iteration. i = 1: result = result << 1 =", "0) return 0; int result = 0; for (int i", "n & 1 n >>= 1 return result class TestMethods(unittest.TestCase):", "n = 0000_0000_0000_0000_0000_0000_0000_0110. We then go to the next iteration.", "We right shift n by 1 to get: n =", "so (n&1) will always be 0 and n >>=1 will", "result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101", "= 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 We right", "to 31 (an integer has 32 bits). In each iteration:", ">>= 1) At the end of the iteration, we return", "input n = 13 (represented in binary as 0000_0000_0000_0000_0000_0000_0000_1101, the", "get: n = 0000_0000_0000_0000_0000_0000_0000_0110. 
We then go to the next", "Example, if input n = 13 (represented in binary as", "to the next iteration. i = 1: result = result", "by 1 digit. Since there we have i=4 to i", "# If this function is called many times, how would", "left, this will result in padding 28 0's to the", "+= n & 1 } n >>>= 1; // padding", "= 1 We right shift n by 1 (n >>=", "would work for input n = 13: Initially, result =", "Follow up: # If this function is called many times,", "& 0xCCCCCCCC) >>> 2); n = ((n & 0x0F0F0F0F) <<", "& 01 = 0). Finally, we update n by shifting", "get: n = 0000_0000_0000_0000_0000_0000_0000_0011. We then go to the next", "'__main__': unittest.main() Java = ''' # Thought: # 1ms 100%", "We first intitialize result to 0. We then iterate from", "int reverseBits(int n) { int ret = 0; for (int", "expected to get # 1ms 100% class Solution { //", ">>> 1); n = ((n & 0x33333333) << 2) |", "i = 31 iterations left, this will result in padding", "2); n = ((n & 0x0F0F0F0F) << 4) | ((n", "by 1 (n >>= 1) At the end of the", "result << 1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001", "# # Follow up: # If this function is called", "calling reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is how our algorithm", "= 0000_0000_0000_0000_0000_0000_0000_0000, n = 13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop:", "just do: (n & 1) Example, if n=5 (101), n&1", "from here to the end of the iteration, n is", "return result class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) print Solution().reverseBits(1)", "1); n = ((n & 0x33333333) << 2) | ((n", "If this function is called many times, how would you", "treat n as an unsigned value public int reverseBits(int n)", "n = ((n & 0x00FF00FF) << 8) | ((n &", "We then go to the next iteration. i = 1:", "the next iteration. 
i = 1: result = result <<", "for readability), calling reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is how", "unittest class Solution: # @param n, an integer # @return", "1 bit. Then, if the last digit of input n", "a given 32 bits unsigned integer. # # For example,", "# Companies # Apple Airbnb # Related Topics # Bit", "bits of a given 32 bits unsigned integer. # #", "digit of n, we just do: (n & 1) Example,", "input n = 13: Initially, result = 0 = 0000_0000_0000_0000_0000_0000_0000_0000,", "the left by 1 digit. Since there we have i=4", "result class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) print Solution().reverseBits(1) if", "we get result = 1011_0000_0000_0000_0000_0000_0000_0000 This is exactly what we", "n = 2 (10), n&1 = 10 & 01 =", ">>> 8); return (n >>> 16) | (n << 16);", "to get: n = 0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now, from here", "32; i++) { result <<= 1; if ((n & 1)", "<<= 1 result |= n & 1 n >>= 1", "first shift result to the left by 1 bit. Then,", "0 on the left side if (i < 31) {", "as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is for readability), calling reverseBits(13) should", "n): result = 0 for i in xrange(32): result <<=", "1 We right shift n by 1 (n >>= 1)", "of a given 32 bits unsigned integer. # # For", "# Bit Manipulation # # Description: Leetcode # 190. Reverse", "by 1 (n >>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0011.", "for (int i = 0; i < 32; i++) {", ">>>= 1; // padding 0 on the left side if", "n >>=1 will not change n. The only change will", "right shift n by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0000", "how would you optimize it? # # Companies # Apple", "integer. # # For example, given input 43261596 (represented in", "Thought: # 1ms 100% class Solution { // you need", "n >>>= 1; // padding 0 on the left side", "will not change n. The only change will be for", "<<=1, i.e. 
shifting result to the left by 1 digit.", "((n & 0xCCCCCCCC) >>> 2); n = ((n & 0x0F0F0F0F)", "& 0xFF00FF00) >>> 8); return (n >>> 16) | (n", "of the iteration, we return result. Example, if input n", "many times, how would you optimize it? # # Companies", "(represented in binary as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is for readability),", "0 to 31 (an integer has 32 bits). In each", "an unsigned value public int reverseBits(int n) { if (n", "xrange(32): result <<= 1 result |= n & 1 n", "n = ((n & 0x33333333) << 2) | ((n &", "the \"_\" is for readability), calling reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000", "left by 1 digit. Since there we have i=4 to", "iterations left, this will result in padding 28 0's to", "value public int reverseBits(int n) { if (n == 0)", "Solution().reverseBits(1) if __name__ == '__main__': unittest.main() Java = ''' #", "n&1 = 10 & 01 = 0). Finally, we update", "0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 = 0;", "Bit Manipulation # # Description: Leetcode # 190. Reverse Bits", "= 0: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1", "an integer # @return an integer def reverseBits(self, n): result", "n = ((n & 0x55555555) << 1) | ((n &", "in binary as 00000010100101000001111010011100), # return 964176192 (represented in binary", "1 (n >>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0110. 
We", "= 1 therefore result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0100", "0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 We right shift n by", "43261596 (represented in binary as 00000010100101000001111010011100), # return 964176192 (represented", "right shift n by 1 (n >>= 1) to get:", "= 13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop: i = 0:", "result << 1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001", "padding 0 on the left side if (i < 31)", "the left by 1 bit. Then, if the last digit", "add 1 to result. To find the last digit of", "go to the next iteration. i = 1: result =", "therefore we don't increment result. We right shift n by", "0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1", "& 001 = 001 = 1; however, if n =", "n by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0000 = 0.", "Finally, we update n by shifting it to the right", "<reponame>JulyKikuAkita/PythonPrac __source__ = 'https://leetcode.com/problems/reverse-bits/description/' # https://github.com/kamyu104/LeetCode/blob/master/Python/reverse-bits.py # Time : O(n)", "shift n by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0000 =", "0. We then iterate from 0 to 31 (an integer", "= ((n & 0x0F0F0F0F) << 4) | ((n & 0xF0F0F0F0)", "public int reverseBits(int n) { if (n == 0) return", "\"_\" is for readability), calling reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here", "(i < 31) { // CATCH: for last digit, don't", "28 0's to the right of result. 
i.e at the", "(n & 1) Example, if n=5 (101), n&1 = 101", "return (n >>> 16) | (n << 16); } }", "# Number of 1 Bits # import unittest class Solution:", "== 1) result++; n >>= 1; } return result; }", "# Space: O(1) # Bit Manipulation # # Description: Leetcode", "result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110", "# For example, given input 43261596 (represented in binary as", "n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore", "result to the left by 1 digit. Since there we", "8) | ((n & 0xFF00FF00) >>> 8); return (n >>>", "= 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result", "Bit Manipulation # Similar Questions # Number of 1 Bits", "integer def reverseBits(self, n): result = 0 for i in", "= result = 0000_0000_0000_0000_0000_0000_0000_0101 We right shift n by 1", "13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop: i = 0: result", "100% class Solution { // you need treat n as", "iteration. i = 3: result = result << 1 =", "need treat n as an unsigned value public int reverseBits(int", "to get # 1ms 100% class Solution { // you", "Then, if the last digit of input n is 1,", "0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result = result +", "1 Bits # import unittest class Solution: # @param n,", "TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) print Solution().reverseBits(1) if __name__ ==", "to the left by 1 bit. 
Then, if the last", "{ if ((n & 1) != 0) { ret |=", "def reverseBits(self, n): result = 0 for i in xrange(32):", "1: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1 =", "= 31 iterations left, this will result in padding 28", ">>> 4); n = ((n & 0x00FF00FF) << 8) |", "= 2: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0100. n&1", "= 1: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1", "given 32 bits unsigned integer. # # For example, given", "intitialize result to 0. We then iterate from 0 to", "0000_0000_0000_0000_0000_0000_0000_0011. We then go to the next iteration. i =", "0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result =", "= ((n & 0x33333333) << 2) | ((n & 0xCCCCCCCC)", "up: # If this function is called many times, how", "n) { if (n == 0) return 0; int result", "res += n & 1 } n >>>= 1; //", "= 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result", "0). Finally, we update n by shifting it to the", "Solution { // you need treat n as an unsigned", "int ret = 0; for (int i = 0; i", "n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore", "n. The only change will be for result <<=1, i.e.", "& 0x55555555) << 1) | ((n & 0xAAAAAAAA) >>> 1);", "(n >>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0011. We then", "the end of the iteration, n is 0, so (n&1)", "= 10 & 01 = 0). Finally, we update n", "0x00FF00FF) << 8) | ((n & 0xFF00FF00) >>> 8); return", "n by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0001. We then", "to the right of result. i.e at the end, we", "} We first intitialize result to 0. 
We then iterate", "32; i++) { if ((n & 1) != 0) {", "{ int ret = 0; for (int i = 0;", "by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now,", "| ((n & 0xF0F0F0F0) >>> 4); n = ((n &", "input n is 1, we add 1 to result. To", "self.assertEqual(1, 1) print Solution().reverseBits(1) if __name__ == '__main__': unittest.main() Java", "change will be for result <<=1, i.e. shifting result to", "https://github.com/kamyu104/LeetCode/blob/master/Python/reverse-bits.py # Time : O(n) # Space: O(1) # Bit", "= ''' # Thought: # 1ms 100% class Solution {", "from 0 to 31 (an integer has 32 bits). In", "00000010100101000001111010011100), # return 964176192 (represented in binary as 00111001011110000010100101000000). #", "# 1ms 100% class Solution { // you need treat", "0000_0000_0000_0000_0000_0000_0000_0001 = 1 We right shift n by 1 (n", "go to the next iteration. i = 2: result =", "as an unsigned value public int reverseBits(int n) { if", "(10), n&1 = 10 & 01 = 0). Finally, we", "+ 1 = 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1", "of result. i.e at the end, we get result =", "to the next iteration. i = 2: result = result", "= 0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now, from here to the end", "result in padding 28 0's to the right of result.", "= 0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 =", "shift n by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0001. We", "unsigned value public int reverseBits(int n) { n = ((n", "0000_0000_0000_0000_0000_0000_0000_0101 We right shift n by 1 to get: n", "O(n) # Space: O(1) # Bit Manipulation # # Description:", "n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore", "you optimize it? 
# # Companies # Apple Airbnb #", "1 = 0000_0000_0000_0000_0000_0000_0000_0100. n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001", "= 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result", "((n & 0xF0F0F0F0) >>> 4); n = ((n & 0x00FF00FF)", "i < 32; i++) { if ((n & 1) !=", "by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0001. We then go", "and n >>=1 will not change n. The only change", "0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 = result = 0000_0000_0000_0000_0000_0000_0000_0101 We right shift", "((n & 1) != 0) { ret |= 1; //same", "find the last digit of n, we just do: (n", "if n=5 (101), n&1 = 101 & 001 = 001", "then go to the next iteration. i = 1: result", "4) | ((n & 0xF0F0F0F0) >>> 4); n = ((n", "on the left side if (i < 31) { //", "0x33333333) << 2) | ((n & 0xCCCCCCCC) >>> 2); n", "0; i < 32; i++) { if ((n & 1)", "do: (n & 1) Example, if n=5 (101), n&1 =", "called many times, how would you optimize it? # #", "1) At the end of the iteration, we return result.", "= 0000_0000_0000_0000_0000_0000_0000_0001 = 1 We right shift n by 1", "Manipulation # Similar Questions # Number of 1 Bits #", "integer # @return an integer def reverseBits(self, n): result =", "Airbnb # Related Topics # Bit Manipulation # Similar Questions", "In each iteration: We first shift result to the left", "00111001011110000010100101000000). # # Follow up: # If this function is", "// CATCH: for last digit, don't shift! ret <<= 1;", "iteration, n is 0, so (n&1) will always be 0", "to the left by 1 digit. 
Since there we have", "Related Topics # Bit Manipulation # Similar Questions # Number", "'https://leetcode.com/problems/reverse-bits/description/' # https://github.com/kamyu104/LeetCode/blob/master/Python/reverse-bits.py # Time : O(n) # Space: O(1)", ">>> 2); n = ((n & 0x0F0F0F0F) << 4) |", "by shifting it to the right by 1 (n >>=", "== '__main__': unittest.main() Java = ''' # Thought: # 1ms", "+ 1 = 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 = result = 0000_0000_0000_0000_0000_0000_0000_0101", "Example, if n=5 (101), n&1 = 101 & 001 =", "= 0; therefore we don't increment result. We right shift", "((n & 0xFF00FF00) >>> 8); return (n >>> 16) |", "O(1) # Bit Manipulation # # Description: Leetcode # 190.", "result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 =", "0000_0000_0000_0000_0000_0000_0000_0001. We then go to the next iteration. i =", "as an unsigned value public int reverseBits(int n) { int", "right of result. i.e at the end, we get result", "<< 4) | ((n & 0xF0F0F0F0) >>> 4); n =", "therefore result = result + 1 = = 0000_0000_0000_0000_0000_0000_0000_1011 We", "{ n = ((n & 0x55555555) << 1) | ((n", "(represented in binary as 00000010100101000001111010011100), # return 964176192 (represented in", "result. We right shift n by 1 (n >>= 1)", "((n & 0x0F0F0F0F) << 4) | ((n & 0xF0F0F0F0) >>>", "@return an integer def reverseBits(self, n): result = 0 for", "# Description: Leetcode # 190. Reverse Bits # # Reverse", "result. To find the last digit of n, we just", "you need treat n as an unsigned value public int", "0; for (int i = 0; i < 32; i++)", "will be for result <<=1, i.e. shifting result to the", "n = 0000_0000_0000_0000_0000_0000_0000_0001. 
We then go to the next iteration.", "0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 We right shift", "1 therefore result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0000 +", "class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) print Solution().reverseBits(1) if __name__", "1; however, if n = 2 (10), n&1 = 10", "iteration: We first shift result to the left by 1", "Solution: # @param n, an integer # @return an integer", "it? # # Companies # Apple Airbnb # Related Topics", "Bits # import unittest class Solution: # @param n, an", "= 1 therefore result = result + 1 = =", "we have i=4 to i = 31 iterations left, this", "| ((n & 0xCCCCCCCC) >>> 2); n = ((n &", "ret; } } We first intitialize result to 0. We", "< 31) { // CATCH: for last digit, don't shift!", ": O(n) # Space: O(1) # Bit Manipulation # #", "binary as 00000010100101000001111010011100), # return 964176192 (represented in binary as", "|= 1; //same as // res += n & 1", "for input n = 13: Initially, result = 0 =", "= result << 1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 &", "of n, we just do: (n & 1) Example, if", "n) { n = ((n & 0x55555555) << 1) |", "@param n, an integer # @return an integer def reverseBits(self,", "<< 1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 =", "readability), calling reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is how our", "will result in padding 28 0's to the right of", "10 & 01 = 0). 
Finally, we update n by", "is how our algorithm would work for input n =", "# @return an integer def reverseBits(self, n): result = 0", "reverseBits(int n) { if (n == 0) return 0; int", "= ((n & 0x00FF00FF) << 8) | ((n & 0xFF00FF00)", "n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore", "!= 0) { ret |= 1; //same as // res", "therefore result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001", "= result + 1 = 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 = result", "n = 0000_0000_0000_0000_0000_0000_0000_0011. We then go to the next iteration.", "Reverse bits of a given 32 bits unsigned integer. #", "i in xrange(32): result <<= 1 result |= n &", "n = 13 (represented in binary as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\"", "0; int result = 0; for (int i = 0;", "1 = 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 We", "1) Example, if n=5 (101), n&1 = 101 & 001", "result + 1 = 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 = result =", "= 3: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1", "== 0) return 0; int result = 0; for (int", "of input n is 1, we add 1 to result.", "{ if (n == 0) return 0; int result =", "public int reverseBits(int n) { n = ((n & 0x55555555)", "0x55555555) << 1) | ((n & 0xAAAAAAAA) >>> 1); n", "n is 1, we add 1 to result. To find", "for last digit, don't shift! ret <<= 1; } }", "we return result. 
Example, if input n = 13 (represented", "1011_0000_0000_0000_0000_0000_0000_0000 Here is how our algorithm would work for input", "} n >>>= 1; // padding 0 on the left", "i = 0: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0000.", "# Related Topics # Bit Manipulation # Similar Questions #", "result = result + 1 = = 0000_0000_0000_0000_0000_0000_0000_1011 We right", "''' # Thought: # 1ms 100% class Solution { //", "Since there we have i=4 to i = 31 iterations", "# # Reverse bits of a given 32 bits unsigned", "to the next iteration. i = 3: result = result", "this function is called many times, how would you optimize", "0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1", "= 2 (10), n&1 = 10 & 01 = 0).", "result = 0 = 0000_0000_0000_0000_0000_0000_0000_0000, n = 13 = 0000_0000_0000_0000_0000_0000_0000_1101", "if ((n & 1) != 0) { ret |= 1;", "(n >>> 16) | (n << 16); } } '''", "reverseBits(self, n): result = 0 for i in xrange(32): result", "13 (represented in binary as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is for", "of 1 Bits # import unittest class Solution: # @param", "the last digit of n, we just do: (n &", "0xFF00FF00) >>> 8); return (n >>> 16) | (n <<", "0x0F0F0F0F) << 4) | ((n & 0xF0F0F0F0) >>> 4); n", "| ((n & 0xAAAAAAAA) >>> 1); n = ((n &", "class Solution { // you need treat n as an", "(n == 0) return 0; int result = 0; for", "if the last digit of input n is 1, we", "Here is how our algorithm would work for input n", "i = 2: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0100.", "Questions # Number of 1 Bits # import unittest class", "not change n. The only change will be for result", "{ // you need treat n as an unsigned value", "# 190. Reverse Bits # # Reverse bits of a", "result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0100. 
n&1 = 0000_0000_0000_0000_0000_0000_0000_0011", "<<= 1; } } return ret; } } We first", "= 0000_0000_0000_0000_0000_0000_0000_0001. We then go to the next iteration. i", "31 (an integer has 32 bits). In each iteration: We", "iterate from 0 to 31 (an integer has 32 bits).", "0; i < 32; i++) { result <<= 1; if", "# Thought: # 1ms 100% class Solution { // you", "# https://github.com/kamyu104/LeetCode/blob/master/Python/reverse-bits.py # Time : O(n) # Space: O(1) #", "# # Description: Leetcode # 190. Reverse Bits # #", ">>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0110. We then go", "1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000", "should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is how our algorithm would work", "return result; } } # 1ms 100% class Solution {", "<< 1 = 0000_0000_0000_0000_0000_0000_0000_0100. n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 =", "algorithm would work for input n = 13: Initially, result", "int reverseBits(int n) { if (n == 0) return 0;", "right shift n by 1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0001.", "be for result <<=1, i.e. shifting result to the left", "1; //same as // res += n & 1 }", "} } return ret; } } We first intitialize result", "get result = 1011_0000_0000_0000_0000_0000_0000_0000 This is exactly what we expected", "1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001", "n >>= 1 return result class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1,", "result to 0. 
We then iterate from 0 to 31", "at the end, we get result = 1011_0000_0000_0000_0000_0000_0000_0000 This is", "right by 1 (n >>= 1) At the end of", "0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result =", "result << 1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001 & 0000_0000_0000_0000_0000_0000_0000_0001", "i = 0; i < 32; i++) { result <<=", "times, how would you optimize it? # # Companies #", "= 0 = 0000_0000_0000_0000_0000_0000_0000_0000, n = 13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting", "if __name__ == '__main__': unittest.main() Java = ''' # Thought:", "what we expected to get # 1ms 100% class Solution", "an unsigned value public int reverseBits(int n) { n =", "be 0 and n >>=1 will not change n. The", "have i=4 to i = 31 iterations left, this will", "n = ((n & 0x0F0F0F0F) << 4) | ((n &", "the last digit of input n is 1, we add", "= 0; i < 32; i++) { if ((n &", "result << 1 = 0000_0000_0000_0000_0000_0000_0000_0100. n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001", "will always be 0 and n >>=1 will not change", "i++) { if ((n & 1) != 0) { ret", "go to the next iteration. i = 3: result =", "result = 0000_0000_0000_0000_0000_0000_0000_0101 We right shift n by 1 to", "test_Local(self): self.assertEqual(1, 1) print Solution().reverseBits(1) if __name__ == '__main__': unittest.main()", "& 0xAAAAAAAA) >>> 1); n = ((n & 0x33333333) <<", "& 1 } n >>>= 1; // padding 0 on", ">>=1 will not change n. The only change will be", "001 = 001 = 1; however, if n = 2", "101 & 001 = 001 = 1; however, if n", "(int i = 0; i < 32; i++) { if", "2) | ((n & 0xCCCCCCCC) >>> 2); n = ((n", "(int i = 0; i < 32; i++) { result", "as 00111001011110000010100101000000). 
# # Follow up: # If this function", "in xrange(32): result <<= 1 result |= n & 1", "0000_0000_0000_0000_0000_0000_0000_0000, n = 13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop: i", "last digit of input n is 1, we add 1", "= 13 (represented in binary as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is", "i = 1: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_0010;", "is 0, so (n&1) will always be 0 and n", "{ // CATCH: for last digit, don't shift! ret <<=", "} return ret; } } We first intitialize result to", "don't shift! ret <<= 1; } } return ret; }", "shift result to the left by 1 bit. Then, if", "1 } n >>>= 1; // padding 0 on the", "1 to result. To find the last digit of n,", "= 0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 =", "We first shift result to the left by 1 bit.", "result. Example, if input n = 13 (represented in binary", "print Solution().reverseBits(1) if __name__ == '__main__': unittest.main() Java = '''", "first intitialize result to 0. We then iterate from 0", "increment result. We right shift n by 1 (n >>=", "Space: O(1) # Bit Manipulation # # Description: Leetcode #", "get # 1ms 100% class Solution { // you need", "1) print Solution().reverseBits(1) if __name__ == '__main__': unittest.main() Java =", "our algorithm would work for input n = 13: Initially,", "We right shift n by 1 (n >>= 1) to", "reverseBits(int n) { n = ((n & 0x55555555) << 1)", "result to the left by 1 bit. Then, if the", "Bits # # Reverse bits of a given 32 bits", "however, if n = 2 (10), n&1 = 10 &", "change n. The only change will be for result <<=1,", "get: n = 0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now, from here to", "1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0110. We then go to", "0000_0000_0000_0000_0000_0000_0000_0110. We then go to the next iteration. i =", "Leetcode # 190. 
Reverse Bits # # Reverse bits of", "<< 8) | ((n & 0xFF00FF00) >>> 8); return (n", "result = 0; for (int i = 0; i <", "1) | ((n & 0xAAAAAAAA) >>> 1); n = ((n", "Number of 1 Bits # import unittest class Solution: #", "binary as 00111001011110000010100101000000). # # Follow up: # If this", "result = 0 for i in xrange(32): result <<= 1", "bits unsigned integer. # # For example, given input 43261596", "1ms 100% class Solution { // you need treat n", "bits). In each iteration: We first shift result to the", "as // res += n & 1 } n >>>=", "0) { ret |= 1; //same as // res +=", "0 = 0000_0000_0000_0000_0000_0000_0000_0000, n = 13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting for", "1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001", "# Similar Questions # Number of 1 Bits # import", "0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore we don't", "n & 1 } n >>>= 1; // padding 0", "the next iteration. i = 3: result = result <<", "& 0x33333333) << 2) | ((n & 0xCCCCCCCC) >>> 2);", "(n >>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0110. We then", "n is 0, so (n&1) will always be 0 and", "= 0000_0000_0000_0000_0000_0000_0000_0100. n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 =", "to get: n = 0000_0000_0000_0000_0000_0000_0000_0011. We then go to the", "there we have i=4 to i = 31 iterations left,", "& 1) != 0) { ret |= 1; //same as", "To find the last digit of n, we just do:", "((n & 0x55555555) << 1) | ((n & 0xAAAAAAAA) >>>", "= 0). Finally, we update n by shifting it to", "n = 13 = 0000_0000_0000_0000_0000_0000_0000_1101 Starting for loop: i =", "} } We first intitialize result to 0. We then", "n by shifting it to the right by 1 (n", "iteration. 
i = 2: result = result << 1 =", "class Solution: # @param n, an integer # @return an", "an unsigned value public int reverseBits(int n) { int ret", ">>= 1 return result class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1)", "& 1) == 1) result++; n >>= 1; } return", "result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1 = 0000_0000_0000_0000_0000_0000_0000_0001", "1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0001. We then go to", "the left side if (i < 31) { // CATCH:", "shifting it to the right by 1 (n >>= 1)", "the end of the iteration, we return result. Example, if", "if n = 2 (10), n&1 = 10 & 01", "end of the iteration, we return result. Example, if input", "i < 32; i++) { result <<= 1; if ((n", "<< 1) | ((n & 0xAAAAAAAA) >>> 1); n =", "result++; n >>= 1; } return result; } } #", "0 for i in xrange(32): result <<= 1 result |=", "for loop: i = 0: result = result << 1", "= result + 1 = = 0000_0000_0000_0000_0000_0000_0000_1011 We right shift", "0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore we don't increment result. We right", "We then iterate from 0 to 31 (an integer has", "3: result = result << 1 = 0000_0000_0000_0000_0000_0000_0000_1010. n&1 =", "= result << 1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 &", "the iteration, n is 0, so (n&1) will always be", "next iteration. i = 1: result = result << 1", "then go to the next iteration. i = 2: result", "we expected to get # 1ms 100% class Solution {", "don't increment result. We right shift n by 1 (n", "would you optimize it? # # Companies # Apple Airbnb", "= 0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore we don't increment result. We", "& 1 n >>= 1 return result class TestMethods(unittest.TestCase): def", "we update n by shifting it to the right by", "0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result = result + 1 =", "= 0000_0000_0000_0000_0000_0000_0000_0110. We then go to the next iteration. 
i", "8); return (n >>> 16) | (n << 16); }", "+ 0000_0000_0000_0000_0000_0000_0000_0001 = result = 0000_0000_0000_0000_0000_0000_0000_0101 We right shift n", "# # For example, given input 43261596 (represented in binary", "result |= n & 1 n >>= 1 return result", "left side if (i < 31) { // CATCH: for", "it to the right by 1 (n >>= 1) At", "1 (n >>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0011. We", "= 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 = result = 0000_0000_0000_0000_0000_0000_0000_0101 We right", "We then go to the next iteration. i = 3:", "< 32; i++) { result <<= 1; if ((n &", "1; } } return ret; } } We first intitialize", "0000_0000_0000_0000_0000_0000_0000_0100. n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1", "31 iterations left, this will result in padding 28 0's", "= 0000_0000_0000_0000_0000_0000_0000_0011. We then go to the next iteration. i", "32 bits). In each iteration: We first shift result to", "is exactly what we expected to get # 1ms 100%", "= 'https://leetcode.com/problems/reverse-bits/description/' # https://github.com/kamyu104/LeetCode/blob/master/Python/reverse-bits.py # Time : O(n) # Space:", "1 n >>= 1 return result class TestMethods(unittest.TestCase): def test_Local(self):", "1 return result class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) print", "n >>= 1; } return result; } } # 1ms", "= = 0000_0000_0000_0000_0000_0000_0000_1011 We right shift n by 1 to", "optimize it? # # Companies # Apple Airbnb # Related", "1) != 0) { ret |= 1; //same as //", "reverseBits(int n) { int ret = 0; for (int i", "to result. 
To find the last digit of n, we", "n by 1 (n >>= 1) to get: n =", "if input n = 13 (represented in binary as 0000_0000_0000_0000_0000_0000_0000_1101,", "001 = 1; however, if n = 2 (10), n&1", "} } # 1ms 100% class Solution { // you", "// res += n & 1 } n >>>= 1;", "we add 1 to result. To find the last digit", "we don't increment result. We right shift n by 1", "ret |= 1; //same as // res += n &", "in binary as 00111001011110000010100101000000). # # Follow up: # If", "def test_Local(self): self.assertEqual(1, 1) print Solution().reverseBits(1) if __name__ == '__main__':", "1 therefore result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0100 +", "is for readability), calling reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is", "result <<= 1 result |= n & 1 n >>=", "how our algorithm would work for input n = 13:", "Time : O(n) # Space: O(1) # Bit Manipulation #", "n&1 = 101 & 001 = 001 = 1; however,", "<< 1 = 0000_0000_0000_0000_0000_0000_0000_0010; n&1 = 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 =", "as an unsigned value public int reverseBits(int n) { n", "2 (10), n&1 = 10 & 01 = 0). Finally,", "ret <<= 1; } } return ret; } } We", "((n & 0x00FF00FF) << 8) | ((n & 0xFF00FF00) >>>", "Reverse Bits # # Reverse bits of a given 32", "= result << 1 = 0000_0000_0000_0000_0000_0000_0000_0100. n&1 = 0000_0000_0000_0000_0000_0000_0000_0011 &", "to the right by 1 (n >>= 1) At the", "Similar Questions # Number of 1 Bits # import unittest", "an integer def reverseBits(self, n): result = 0 for i", "+ 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 We right shift n", "result <<=1, i.e. 
shifting result to the left by 1", "0xAAAAAAAA) >>> 1); n = ((n & 0x33333333) << 2)", ">>= 1; } return result; } } # 1ms 100%", "# @param n, an integer # @return an integer def", "work for input n = 13: Initially, result = 0", "of the iteration, n is 0, so (n&1) will always", "if (n == 0) return 0; int result = 0;", "< 32; i++) { if ((n & 1) != 0)", "4); n = ((n & 0x00FF00FF) << 8) | ((n", "in binary as 0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is for readability), calling", "0; therefore we don't increment result. We right shift n", "//same as // res += n & 1 } n", "digit of input n is 1, we add 1 to", "{ result <<= 1; if ((n & 1) == 1)", "ret = 0; for (int i = 0; i <", "to the end of the iteration, n is 0, so", "return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is how our algorithm would work for", "result. i.e at the end, we get result = 1011_0000_0000_0000_0000_0000_0000_0000", "Description: Leetcode # 190. Reverse Bits # # Reverse bits", "n as an unsigned value public int reverseBits(int n) {", "next iteration. i = 2: result = result << 1", "to 0. We then iterate from 0 to 31 (an", "| ((n & 0xFF00FF00) >>> 8); return (n >>> 16)", "input 43261596 (represented in binary as 00000010100101000001111010011100), # return 964176192", "1; } return result; } } # 1ms 100% class", "= 0; for (int i = 0; i < 32;", "therefore result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001", "0000_0000_0000_0000_0000_0000_0000_1011 We right shift n by 1 to get: n", "# Bit Manipulation # Similar Questions # Number of 1", "// padding 0 on the left side if (i <", "# # Companies # Apple Airbnb # Related Topics #", ">>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0011. 
We then go", "by 1 (n >>= 1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0110.", "For example, given input 43261596 (represented in binary as 00000010100101000001111010011100),", "here to the end of the iteration, n is 0,", "always be 0 and n >>=1 will not change n.", "for i in xrange(32): result <<= 1 result |= n", "is called many times, how would you optimize it? #", "public int reverseBits(int n) { int ret = 0; for", "to get: n = 0000_0000_0000_0000_0000_0000_0000_0110. We then go to the", "= 1; however, if n = 2 (10), n&1 =", "left by 1 bit. Then, if the last digit of", "= 0000_0000_0000_0000_0000_0000_0000_0110 & 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0000 = 0; therefore we", "n, we just do: (n & 1) Example, if n=5", "if ((n & 1) == 1) result++; n >>= 1;", "if (i < 31) { // CATCH: for last digit,", "has 32 bits). In each iteration: We first shift result", "0000_0000_0000_0000_0000_0000_0000_1101, the \"_\" is for readability), calling reverseBits(13) should return:", "as 00000010100101000001111010011100), # return 964176192 (represented in binary as 00111001011110000010100101000000).", "Starting for loop: i = 0: result = result <<", "|= n & 1 n >>= 1 return result class", "result + 1 = 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 =", "digit. Since there we have i=4 to i = 31", "& 0x0F0F0F0F) << 4) | ((n & 0xF0F0F0F0) >>> 4);", "= 1011_0000_0000_0000_0000_0000_0000_0000 This is exactly what we expected to get", "13: Initially, result = 0 = 0000_0000_0000_0000_0000_0000_0000_0000, n = 13", "we just do: (n & 1) Example, if n=5 (101),", "n = 0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now, from here to the", "int result = 0; for (int i = 0; i", "i=4 to i = 31 iterations left, this will result", "padding 28 0's to the right of result. i.e at", "digit, don't shift! ret <<= 1; } } return ret;", "is 1, we add 1 to result. 
To find the", "32 bits unsigned integer. # # For example, given input", "end, we get result = 1011_0000_0000_0000_0000_0000_0000_0000 This is exactly what", "unsigned value public int reverseBits(int n) { int ret =", "int reverseBits(int n) { n = ((n & 0x55555555) <<", "190. Reverse Bits # # Reverse bits of a given", "= result << 1 = 0000_0000_0000_0000_0000_0000_0000_0000. n&1 = 0000_0000_0000_0000_0000_0000_0000_1101 &", "Java = ''' # Thought: # 1ms 100% class Solution", "1011_0000_0000_0000_0000_0000_0000_0000 This is exactly what we expected to get #", "1 to get: n = 0000_0000_0000_0000_0000_0000_0000_0000 = 0. Now, from", "then iterate from 0 to 31 (an integer has 32", "= ((n & 0x55555555) << 1) | ((n & 0xAAAAAAAA)", "reverseBits(13) should return: 1011_0000_0000_0000_0000_0000_0000_0000 Here is how our algorithm would", "= 0000_0000_0000_0000_0000_0000_0000_1011 We right shift n by 1 to get:", "1) to get: n = 0000_0000_0000_0000_0000_0000_0000_0011. We then go to", "= result + 1 = 0000_0000_0000_0000_0000_0000_0000_0000 + 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001", "CATCH: for last digit, don't shift! ret <<= 1; }", "result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0100 + 0000_0000_0000_0000_0000_0000_0000_0001 =", "= 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result = result + 1", "& 0000_0000_0000_0000_0000_0000_0000_0001 = 0000_0000_0000_0000_0000_0000_0000_0001 = 1 therefore result = result", "= 1 therefore result = result + 1 = 0000_0000_0000_0000_0000_0000_0000_0000", "end of the iteration, n is 0, so (n&1) will", "last digit of n, we just do: (n & 1)", "the right by 1 (n >>= 1) At the end", "next iteration. i = 3: result = result << 1", "& 1) Example, if n=5 (101), n&1 = 101 &", "shift! ret <<= 1; } } return ret; } }", "The only change will be for result <<=1, i.e. shifting", "0xF0F0F0F0) >>> 4); n = ((n & 0x00FF00FF) << 8)", "<< 2) | ((n & 0xCCCCCCCC) >>> 2); n =", "return result. 
Example, if input n = 13 (represented in", "0, so (n&1) will always be 0 and n >>=1", "i.e. shifting result to the left by 1 digit. Since", "0xCCCCCCCC) >>> 2); n = ((n & 0x0F0F0F0F) << 4)", "get: n = 0000_0000_0000_0000_0000_0000_0000_0001. We then go to the next", "result <<= 1; if ((n & 1) == 1) result++;", "1; // padding 0 on the left side if (i", "import unittest class Solution: # @param n, an integer #", "31) { // CATCH: for last digit, don't shift! ret" ]
[ "the last in the module, exactly as formatted: __version__ =", "be the last in the module, exactly as formatted: __version__", "*must* be the last in the module, exactly as formatted:", "information.\"\"\" # The following line *must* be the last in", "\"\"\"Version information.\"\"\" # The following line *must* be the last", "following line *must* be the last in the module, exactly", "line *must* be the last in the module, exactly as", "last in the module, exactly as formatted: __version__ = \"0.16.1\"", "# The following line *must* be the last in the", "<gh_stars>0 \"\"\"Version information.\"\"\" # The following line *must* be the", "The following line *must* be the last in the module," ]
[ "PdfFileWriter import re range_pattern = re.compile(r'(\\d+)(\\.\\.|-)(\\d+)') comma_pattern = re.compile('\\d+(,\\d+)*') def", "\"usage examle:\\npython3 selective_merge_pdf.py file1.pdf 1-3 file2.pdf 3,4,10 file1.pdf 50\" assert(len(argv)", "groups = range_pattern.search(pages_str) if groups: start = int(groups.group(1)) end =", "= argv[2::2] pdf_writer = PdfFileWriter() for file_name, pages in zip(files_names,", "last_page_index, pages)) for page in pages_to_add: pdf_writer.addPage(pdf_reader.getPage(page - 1)) with", "comma_pattern = re.compile('\\d+(,\\d+)*') def pages_args_to_array(pages_str): groups = range_pattern.search(pages_str) if groups:", "int(groups.group(3)) return list(range(start, end + 1)) elif comma_pattern.search(pages_str): return [int(d)", "= pages_args_to_array(pages) pages_to_add = list(filter(lambda i: i >= 0 and", "1)) elif comma_pattern.search(pages_str): return [int(d) for d in pages_str.split(',')] else:", "+ 1)) elif comma_pattern.search(pages_str): return [int(d) for d in pages_str.split(',')]", "% 2 == 1), \"invalid arguments; supply page numbers after", "1), \"usage examle:\\npython3 selective_merge_pdf.py file1.pdf 1-3 file2.pdf 3,4,10 file1.pdf 50\"", "0 and i <= last_page_index, pages)) for page in pages_to_add:", "re.compile('\\d+(,\\d+)*') def pages_args_to_array(pages_str): groups = range_pattern.search(pages_str) if groups: start =", "= re.compile(r'(\\d+)(\\.\\.|-)(\\d+)') comma_pattern = re.compile('\\d+(,\\d+)*') def pages_args_to_array(pages_str): groups = range_pattern.search(pages_str)", "examle:\\npython3 selective_merge_pdf.py file1.pdf 1-3 file2.pdf 3,4,10 file1.pdf 50\" assert(len(argv) %", "PdfFileWriter() for file_name, pages in zip(files_names, pages_args): pdf_reader = PdfFileReader(file_name)", "= PdfFileWriter() for file_name, pages in zip(files_names, pages_args): pdf_reader =", "i: i >= 0 and i <= last_page_index, pages)) for", "raise Exception('pages should be like 1,2,3 or 1-3, but was", "and i <= 
# A page range like "1-3" or "1..3": captures the start and end page numbers.
range_pattern = re.compile(r'(\d+)(\.\.|-)(\d+)')
# A comma-separated list of page numbers like "1,2,3" (a single "7" also matches).
comma_pattern = re.compile(r'\d+(,\d+)*')


def pages_args_to_array(pages_str):
    """Parse a page-selection argument into a list of 1-based page numbers.

    Accepts either an inclusive range ("1-3" or "1..3") or a
    comma-separated list ("1,2,3").

    Args:
        pages_str: String describing the pages to select.

    Returns:
        List of integer page numbers; ranges are inclusive on both ends,
        so "1-3" yields [1, 2, 3].

    Raises:
        ValueError: If the string matches neither supported format.
            (More specific than the bare Exception the original raised,
            and still caught by callers that catch Exception.)
    """
    groups = range_pattern.search(pages_str)
    if groups:
        start = int(groups.group(1))
        end = int(groups.group(3))
        return list(range(start, end + 1))
    # fullmatch (rather than search) rejects inputs like "1,2x" up front
    # instead of letting int() blow up with a less helpful message below.
    elif comma_pattern.fullmatch(pages_str):
        return [int(d) for d in pages_str.split(',')]
    else:
        raise ValueError('pages should be like 1,2,3 or 1-3, but was {}'
                         .format(pages_str))
if __name__ == '__main__':
    # NOTE: asserts are stripped under `python -O`; kept here to preserve
    # the script's existing failure behavior for callers/scripts.
    assert (len(argv) > 1), \
        "usage example:\npython3 selective_merge_pdf.py file1.pdf 1-3 file2.pdf 3,4,10 file1.pdf 50"
    assert (len(argv) % 2 == 1), \
        "invalid arguments; supply page numbers after each pdf name"

    # argv alternates: file1 pages1 file2 pages2 ...
    files_names = argv[1::2]
    pages_args = argv[2::2]

    pdf_writer = PdfFileWriter()
    for file_name, pages in zip(files_names, pages_args):
        pdf_reader = PdfFileReader(file_name)
        last_page_index = pdf_reader.getNumPages()
        pages = pages_args_to_array(pages)
        # Pages are 1-based. BUG FIX: the original filter allowed page 0,
        # which made getPage(0 - 1) silently append the file's LAST page.
        pages_to_add = [page for page in pages if 1 <= page <= last_page_index]
        for page in pages_to_add:
            # getPage() is 0-indexed, hence the -1.
            pdf_writer.addPage(pdf_reader.getPage(page - 1))

    with open("merged.pdf", 'wb') as out:
        pdf_writer.write(out)
[ "detected_vps, image_dims): \"\"\"Measures error in direction from center of detected", "VPs do not count against the score. Based on log", "/ height def vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures error in direction", "in the location of detected vanishing points. \"Missed\" or \"extra\"", "against the score. Based on log distance of detected vp", "on log distance of detected vp from ground truth vp.", "in a detected horizon. This measures the max distance between", "in the number of detected vanishing points. Returns: Integer, positive", "dt_vp in detected_vps: gt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0], gt_vp[1]))", "\"\"\"Calculates error in a detected horizon. This measures the max", "= geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])) dt_angle = geom_tools.get_line_angle(( principal_point[0],", "of ground truth VP point tuples. detected_vps: List of detected", "if a horizon is missing altogether. \"\"\" if ground_truth_horizon is", "the GT horizon line. detected_horizon: Tuple with (slope, intercept) for", "in pixels. Returns: List with float degrees of error for", "the ground truth horizon line, within the image's x-axis, and", "total_error / min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error in", "== 0: return 0 point_pair_dists = [] for gt_vp in", "gt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])) dt_angle = geom_tools.get_line_angle((", "List of ground truth VP point tuples. detected_vps: List of", "detected horizon line and the ground truth horizon line, within", "Error is None for missing VPs. 
\"\"\" principal_point = (image_dims[0]", "* x + detected_horizon[1] width, height = image_dims return max(abs(gt(0)", "return detected_horizon[0] * x + detected_horizon[1] width, height = image_dims", "ground_truth_horizon[0] * x + ground_truth_horizon[1] def dt(x): return detected_horizon[0] *", "+ detected_horizon[1] width, height = image_dims return max(abs(gt(0) - dt(0)),", "detected_horizon[0] * x + detected_horizon[1] width, height = image_dims return", "gt_vp_to_error or dt_vp in seen_dt_vps: continue gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp)", "dt_vp[1])) angle_diff = 180 - abs(abs(gt_angle - dt_angle) - 180)", "distance between the detected horizon line and the ground truth", "by image height. Args: ground_truth_horizon: Tuple with (slope, intercept) for", "min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error in the number", "0: return 0 point_pair_dists = [] for gt_vp in ground_truth_vps:", "in seen_gt_vps or dt_vp in seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if", "points. Returns: Integer, positive when there are too many VPs,", "when there are too few. \"\"\" return len(detected_vps) - len(ground_truth_vps)", "detected horizon line. image_dims: Tuple of integers, (width, height) of", "Returns: List with float degrees of error for each ground", "intercept) for the detected horizon line. image_dims: Tuple of integers,", "ground_truth_horizon[1] def dt(x): return detected_horizon[0] * x + detected_horizon[1] width,", "number of detected vanishing points. Returns: Integer, positive when there", "GT horizon line. detected_horizon: Tuple with (slope, intercept) for the", "gt_vp in ground_truth_vps: for dt_vp in detected_vps: distance = geom_tools.point_to_point_dist(gt_vp,", "Returns: Float, or None if a horizon is missing altogether.", "closest unclaimed ground truth VP. 
Args: ground_truth_vps: List of ground", "log distance of detected vp from ground truth vp. Args:", "error in a detected horizon. This measures the max distance", "Each detected VP is matched with its closest unclaimed ground", "VP point tuples. Returns: Float, error. \"\"\" if len(ground_truth_vps) ==", "many VPs, negative when there are too few. \"\"\" return", "(slope, intercept) for the GT horizon line. detected_horizon: Tuple with", "for the detected horizon line. image_dims: Tuple of integers, (width,", "float degrees of error for each ground truth VP. Error", "set() total_error = 0 for distance, gt_vp, dt_vp in point_pair_dists:", "return max(abs(gt(0) - dt(0)), abs(gt(width) - dt(width))) / height def", "image height. Args: ground_truth_horizon: Tuple with (slope, intercept) for the", "2, image_dims[1] // 2) point_pair_dists = [] for gt_vp in", "within the image's x-axis, and normalized by image height. Args:", "sorted(point_pair_dists, key=lambda k: k[0]) gt_vp_to_error = {} seen_dt_vps = set()", "distance of detected vp from ground truth vp. Args: ground_truth_vps:", "principal_point[1], dt_vp[0], dt_vp[1])) angle_diff = 180 - abs(abs(gt_angle - dt_angle)", "def dt(x): return detected_horizon[0] * x + detected_horizon[1] width, height", "of the image, in pixels. 
Returns: List with float degrees", "in detected_vps: gt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])) dt_angle", "dt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0], dt_vp[1])) angle_diff = 180", "point_pair_dists = [] for gt_vp in ground_truth_vps: for dt_vp in", "detected_horizon: Tuple with (slope, intercept) for the detected horizon line.", "seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None) for gt in ground_truth_vps] def location_accuracy_error(ground_truth_vps,", "[] for gt_vp in ground_truth_vps: for dt_vp in detected_vps: distance", "location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average error in the location of detected", "or None if a horizon is missing altogether. \"\"\" if", "VP. Args: ground_truth_vps: List of ground truth VP point tuples.", "of detected vp from ground truth vp. Args: ground_truth_vps: List", "detected_horizon, image_dims): \"\"\"Calculates error in a detected horizon. This measures", "key=lambda k: k[0]) seen_gt_vps = set() seen_dt_vps = set() total_error", "vp. Args: ground_truth_vps: List of ground truth VP point tuples.", "gt_vp, dt_vp in point_pair_dists: if gt_vp in gt_vp_to_error or dt_vp", "the image, in pixels. Returns: List with float degrees of", "(width, height) of the image, in pixels. Returns: Float, or", "tuples. detected_vps: List of detected VP point tuples. Returns: Float,", "return total_error / min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error", "the score. Based on log distance of detected vp from", "horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates error in a detected horizon. 
This", "= 180 - abs(abs(gt_angle - dt_angle) - 180) point_pair_dists.append((angle_diff, gt_vp,", "gt_vp[0], gt_vp[1])) dt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0], dt_vp[1])) angle_diff", "image_dims: Tuple of integers, (width, height) of the image, in", "of integers, (width, height) of the image, in pixels. Returns:", "dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda k: k[0]) seen_gt_vps =", "distance > 0: total_error += math.log(distance) return total_error / min(len(detected_vps),", "Args: ground_truth_horizon: Tuple with (slope, intercept) for the GT horizon", "len(ground_truth_vps) == 0 or len(detected_vps) == 0: return 0 point_pair_dists", "vp import geom_tools def horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates error in", "or len(detected_vps) == 0: return 0 point_pair_dists = [] for", "total_error = 0 for distance, gt_vp, dt_vp in point_pair_dists: if", "= geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0], dt_vp[1])) angle_diff = 180 -", "def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error in the number of detected", "detected_vps: List of detected VP point tuples. Returns: Float, error.", "in pixels. Returns: Float, or None if a horizon is", "ground_truth_vps: List of ground truth VP point tuples. detected_vps: List", "\"\"\"Measures error in direction from center of detected vanishing points.", "0 point_pair_dists = [] for gt_vp in ground_truth_vps: for dt_vp", "if ground_truth_horizon is None or detected_horizon is None: return None", "is matched with its closest unclaimed ground truth VP. Args:", "(slope, intercept) for the detected horizon line. image_dims: Tuple of", "error in the number of detected vanishing points. 
Returns: Integer,", "len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error in the number of", "for each ground truth VP. Error is None for missing", "principal_point = (image_dims[0] // 2, image_dims[1] // 2) point_pair_dists =", "VP point tuples. image_dims: Tuple of integers, (width, height) of", "= distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None) for gt in ground_truth_vps]", "Returns: Integer, positive when there are too many VPs, negative", "/ min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error in the", "dt_vp)) sorted(point_pair_dists, key=lambda k: k[0]) seen_gt_vps = set() seen_dt_vps =", "image_dims): \"\"\"Measures error in direction from center of detected vanishing", "distance = geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda k:", "of detected VP point tuples. image_dims: Tuple of integers, (width,", "gt_vp, dt_vp)) point_pair_dists = sorted(point_pair_dists, key=lambda k: k[0]) gt_vp_to_error =", "there are too many VPs, negative when there are too", "VP is matched with its closest unclaimed ground truth VP.", "image, in pixels. Returns: List with float degrees of error", "continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance > 0: total_error += math.log(distance)", "error for each ground truth VP. Error is None for", "\"extra\" VPs do not count against the score. Based on", "seen_gt_vps or dt_vp in seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance", "\"\"\" principal_point = (image_dims[0] // 2, image_dims[1] // 2) point_pair_dists", "the image's x-axis, and normalized by image height. 
Args: ground_truth_horizon:", "import geom_tools def horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates error in a", "x + detected_horizon[1] width, height = image_dims return max(abs(gt(0) -", "matched with its closest unclaimed ground truth VP. Args: ground_truth_vps:", "def vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures error in direction from center", "- 180) point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists = sorted(point_pair_dists, key=lambda k:", "negative when there are too few. \"\"\" return len(detected_vps) -", "0 for distance, gt_vp, dt_vp in point_pair_dists: if gt_vp in", "detected_horizon[1] width, height = image_dims return max(abs(gt(0) - dt(0)), abs(gt(width)", "vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures error in direction from center of", "measures the max distance between the detected horizon line and", "if gt_vp in gt_vp_to_error or dt_vp in seen_dt_vps: continue gt_vp_to_error[gt_vp]", "average error in the location of detected vanishing points. \"Missed\"", "List of detected VP point tuples. image_dims: Tuple of integers,", "point_pair_dists: if gt_vp in seen_gt_vps or dt_vp in seen_dt_vps: continue", "detected horizon. This measures the max distance between the detected", "is missing altogether. \"\"\" if ground_truth_horizon is None or detected_horizon", "not count against the score. Based on log distance of", "VP. Error is None for missing VPs. \"\"\" principal_point =", "the number of detected vanishing points. Returns: Integer, positive when", "Float, or None if a horizon is missing altogether. \"\"\"", "height def vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures error in direction from", "gt_vp in ground_truth_vps: for dt_vp in detected_vps: gt_angle = geom_tools.get_line_angle((", "with (slope, intercept) for the detected horizon line. 
image_dims: Tuple", "- abs(abs(gt_angle - dt_angle) - 180) point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists", "VPs, negative when there are too few. \"\"\" return len(detected_vps)", "point tuples. detected_vps: List of detected VP point tuples. image_dims:", "truth VP. Error is None for missing VPs. \"\"\" principal_point", "of detected VP point tuples. Returns: Float, error. \"\"\" if", "pixels. Returns: Float, or None if a horizon is missing", "horizon. This measures the max distance between the detected horizon", "positive when there are too many VPs, negative when there", "+ ground_truth_horizon[1] def dt(x): return detected_horizon[0] * x + detected_horizon[1]", "in ground_truth_vps: for dt_vp in detected_vps: gt_angle = geom_tools.get_line_angle(( principal_point[0],", "gt_vp in seen_gt_vps or dt_vp in seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp)", "VP point tuples. detected_vps: List of detected VP point tuples.", "in gt_vp_to_error or dt_vp in seen_dt_vps: continue gt_vp_to_error[gt_vp] = distance", "k[0]) seen_gt_vps = set() seen_dt_vps = set() total_error = 0", "\"\"\" if ground_truth_horizon is None or detected_horizon is None: return", "distance, gt_vp, dt_vp in point_pair_dists: if gt_vp in gt_vp_to_error or", "of the image, in pixels. Returns: Float, or None if", "distance, gt_vp, dt_vp in point_pair_dists: if gt_vp in seen_gt_vps or", "gt_vp[1])) dt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0], dt_vp[1])) angle_diff =", "len(detected_vps) == 0: return 0 point_pair_dists = [] for gt_vp", "Integer, positive when there are too many VPs, negative when", "image, in pixels. Returns: Float, or None if a horizon", "sorted(point_pair_dists, key=lambda k: k[0]) seen_gt_vps = set() seen_dt_vps = set()", "// 2) point_pair_dists = [] for gt_vp in ground_truth_vps: for", "location of detected vanishing points. 
\"Missed\" or \"extra\" VPs do", "if gt_vp in seen_gt_vps or dt_vp in seen_dt_vps: continue seen_gt_vps.add(gt_vp)", "the location of detected vanishing points. \"Missed\" or \"extra\" VPs", "from center of detected vanishing points. Each detected VP is", "// 2, image_dims[1] // 2) point_pair_dists = [] for gt_vp", "return [gt_vp_to_error.get(gt, None) for gt in ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps):", "point_pair_dists = sorted(point_pair_dists, key=lambda k: k[0]) gt_vp_to_error = {} seen_dt_vps", "detected VP point tuples. image_dims: Tuple of integers, (width, height)", "x-axis, and normalized by image height. Args: ground_truth_horizon: Tuple with", "gt_vp, dt_vp in point_pair_dists: if gt_vp in seen_gt_vps or dt_vp", "def gt(x): return ground_truth_horizon[0] * x + ground_truth_horizon[1] def dt(x):", "ground truth VP. Error is None for missing VPs. \"\"\"", "score. Based on log distance of detected vp from ground", "k[0]) gt_vp_to_error = {} seen_dt_vps = set() for distance, gt_vp,", "in ground_truth_vps: for dt_vp in detected_vps: distance = geom_tools.point_to_point_dist(gt_vp, dt_vp)", "ground_truth_vps: for dt_vp in detected_vps: distance = geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance,", "in direction from center of detected vanishing points. Each detected", "degrees of error for each ground truth VP. Error is", "truth VP. Args: ground_truth_vps: List of ground truth VP point", "seen_dt_vps = set() for distance, gt_vp, dt_vp in point_pair_dists: if", "ground truth horizon line, within the image's x-axis, and normalized", "import math from vp import geom_tools def horizon_error(ground_truth_horizon, detected_horizon, image_dims):", "ground_truth_horizon: Tuple with (slope, intercept) for the GT horizon line.", "\"Missed\" or \"extra\" VPs do not count against the score.", "= set() seen_dt_vps = set() total_error = 0 for distance,", "missing VPs. 
\"\"\" principal_point = (image_dims[0] // 2, image_dims[1] //", "dt_vp)) point_pair_dists = sorted(point_pair_dists, key=lambda k: k[0]) gt_vp_to_error = {}", "of error for each ground truth VP. Error is None", "This measures the max distance between the detected horizon line", "normalized by image height. Args: ground_truth_horizon: Tuple with (slope, intercept)", "or dt_vp in seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance >", "x + ground_truth_horizon[1] def dt(x): return detected_horizon[0] * x +", "== 0 or len(detected_vps) == 0: return 0 point_pair_dists =", "detected_vps: List of detected VP point tuples. image_dims: Tuple of", "\"\"\"Measures error in the number of detected vanishing points. Returns:", "of detected vanishing points. Each detected VP is matched with", "* x + ground_truth_horizon[1] def dt(x): return detected_horizon[0] * x", "= set() for distance, gt_vp, dt_vp in point_pair_dists: if gt_vp", "ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average error in the location", "k: k[0]) gt_vp_to_error = {} seen_dt_vps = set() for distance,", "seen_dt_vps = set() total_error = 0 for distance, gt_vp, dt_vp", "horizon line. detected_horizon: Tuple with (slope, intercept) for the detected", "if len(ground_truth_vps) == 0 or len(detected_vps) == 0: return 0", "key=lambda k: k[0]) gt_vp_to_error = {} seen_dt_vps = set() for", "image_dims): \"\"\"Calculates error in a detected horizon. This measures the", "180 - abs(abs(gt_angle - dt_angle) - 180) point_pair_dists.append((angle_diff, gt_vp, dt_vp))", "= set() total_error = 0 for distance, gt_vp, dt_vp in", "detected vp from ground truth vp. Args: ground_truth_vps: List of", "each ground truth VP. 
Error is None for missing VPs.", "dt(width))) / height def vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures error in", "\"\"\"Measures average error in the location of detected vanishing points.", "for dt_vp in detected_vps: distance = geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp,", "max distance between the detected horizon line and the ground", "points. Each detected VP is matched with its closest unclaimed", "math from vp import geom_tools def horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates", "None if a horizon is missing altogether. \"\"\" if ground_truth_horizon", "set() for distance, gt_vp, dt_vp in point_pair_dists: if gt_vp in", "line. detected_horizon: Tuple with (slope, intercept) for the detected horizon", "return ground_truth_horizon[0] * x + ground_truth_horizon[1] def dt(x): return detected_horizon[0]", "error in the location of detected vanishing points. \"Missed\" or", "List of detected VP point tuples. Returns: Float, error. \"\"\"", "gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None) for gt in", "- dt(0)), abs(gt(width) - dt(width))) / height def vp_direction_error(ground_truth_vps, detected_vps,", "{} seen_dt_vps = set() for distance, gt_vp, dt_vp in point_pair_dists:", "max(abs(gt(0) - dt(0)), abs(gt(width) - dt(width))) / height def vp_direction_error(ground_truth_vps,", "center of detected vanishing points. Each detected VP is matched", "in ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average error in the", "the max distance between the detected horizon line and the", "Args: ground_truth_vps: List of ground truth VP point tuples. detected_vps:", "\"\"\" if len(ground_truth_vps) == 0 or len(detected_vps) == 0: return", "too many VPs, negative when there are too few. 
\"\"\"", "- dt(width))) / height def vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures error", "gt(x): return ground_truth_horizon[0] * x + ground_truth_horizon[1] def dt(x): return", "- dt_angle) - 180) point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists = sorted(point_pair_dists,", "dt_angle) - 180) point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists = sorted(point_pair_dists, key=lambda", "dt_vp in point_pair_dists: if gt_vp in gt_vp_to_error or dt_vp in", "its closest unclaimed ground truth VP. Args: ground_truth_vps: List of", "ground_truth_vps: for dt_vp in detected_vps: gt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1],", "geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda k: k[0]) seen_gt_vps", "180) point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists = sorted(point_pair_dists, key=lambda k: k[0])", "abs(abs(gt_angle - dt_angle) - 180) point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists =", "the image, in pixels. Returns: Float, or None if a", "Based on log distance of detected vp from ground truth", "dt_vp[0], dt_vp[1])) angle_diff = 180 - abs(abs(gt_angle - dt_angle) -", "Tuple of integers, (width, height) of the image, in pixels.", "abs(gt(width) - dt(width))) / height def vp_direction_error(ground_truth_vps, detected_vps, image_dims): \"\"\"Measures", "VPs. \"\"\" principal_point = (image_dims[0] // 2, image_dims[1] // 2)", "points. \"Missed\" or \"extra\" VPs do not count against the", "+= math.log(distance) return total_error / min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps):", "detected vanishing points. Each detected VP is matched with its", "tuples. detected_vps: List of detected VP point tuples. 
image_dims: Tuple", "gt_vp in gt_vp_to_error or dt_vp in seen_dt_vps: continue gt_vp_to_error[gt_vp] =", "intercept) for the GT horizon line. detected_horizon: Tuple with (slope,", "def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average error in the location of", "detected_vps: distance = geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda", "between the detected horizon line and the ground truth horizon", "a detected horizon. This measures the max distance between the", "the detected horizon line. image_dims: Tuple of integers, (width, height)", "None for missing VPs. \"\"\" principal_point = (image_dims[0] // 2,", "seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance > 0: total_error +=", "(width, height) of the image, in pixels. Returns: List with", "of detected vanishing points. Returns: Integer, positive when there are", "continue gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None) for gt", "seen_gt_vps = set() seen_dt_vps = set() total_error = 0 for", "altogether. \"\"\" if ground_truth_horizon is None or detected_horizon is None:", "error. \"\"\" if len(ground_truth_vps) == 0 or len(detected_vps) == 0:", "of detected vanishing points. \"Missed\" or \"extra\" VPs do not", "> 0: total_error += math.log(distance) return total_error / min(len(detected_vps), len(ground_truth_vps))", "vanishing points. 
Each detected VP is matched with its closest", "0 or len(detected_vps) == 0: return 0 point_pair_dists = []", "for distance, gt_vp, dt_vp in point_pair_dists: if gt_vp in gt_vp_to_error", "total_error += math.log(distance) return total_error / min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps,", "k: k[0]) seen_gt_vps = set() seen_dt_vps = set() total_error =", "= {} seen_dt_vps = set() for distance, gt_vp, dt_vp in", "horizon line, within the image's x-axis, and normalized by image", "gt_vp_to_error = {} seen_dt_vps = set() for distance, gt_vp, dt_vp", "line. image_dims: Tuple of integers, (width, height) of the image,", "detected vanishing points. \"Missed\" or \"extra\" VPs do not count", "dt_vp in point_pair_dists: if gt_vp in seen_gt_vps or dt_vp in", "the detected horizon line and the ground truth horizon line,", "count against the score. Based on log distance of detected", "Returns: Float, error. \"\"\" if len(ground_truth_vps) == 0 or len(detected_vps)", "dt_vp in seen_dt_vps: continue gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt,", "in detected_vps: distance = geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists,", "vanishing points. \"Missed\" or \"extra\" VPs do not count against", "height) of the image, in pixels. 
Returns: Float, or None", "= sorted(point_pair_dists, key=lambda k: k[0]) gt_vp_to_error = {} seen_dt_vps =", "detected_horizon is None: return None def gt(x): return ground_truth_horizon[0] *", "detected_vps): \"\"\"Measures error in the number of detected vanishing points.", "num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures error in the number of detected vanishing", "or detected_horizon is None: return None def gt(x): return ground_truth_horizon[0]", "seen_dt_vps.add(dt_vp) if distance > 0: total_error += math.log(distance) return total_error", "in seen_dt_vps: continue gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None)", "pixels. Returns: List with float degrees of error for each", "height. Args: ground_truth_horizon: Tuple with (slope, intercept) for the GT", "is None for missing VPs. \"\"\" principal_point = (image_dims[0] //", "None def gt(x): return ground_truth_horizon[0] * x + ground_truth_horizon[1] def", "(image_dims[0] // 2, image_dims[1] // 2) point_pair_dists = [] for", "principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])) dt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0],", "a horizon is missing altogether. \"\"\" if ground_truth_horizon is None", "= geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda k: k[0])", "tuples. image_dims: Tuple of integers, (width, height) of the image,", "Tuple with (slope, intercept) for the GT horizon line. detected_horizon:", "with (slope, intercept) for the GT horizon line. detected_horizon: Tuple", "ground truth VP. 
Args: ground_truth_vps: List of ground truth VP", "math.log(distance) return total_error / min(len(detected_vps), len(ground_truth_vps)) def num_model_detection_error(ground_truth_vps, detected_vps): \"\"\"Measures", "= image_dims return max(abs(gt(0) - dt(0)), abs(gt(width) - dt(width))) /", "for dt_vp in detected_vps: gt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0],", "dt(x): return detected_horizon[0] * x + detected_horizon[1] width, height =", "gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda k: k[0]) seen_gt_vps = set() seen_dt_vps", "point_pair_dists.append((angle_diff, gt_vp, dt_vp)) point_pair_dists = sorted(point_pair_dists, key=lambda k: k[0]) gt_vp_to_error", "with float degrees of error for each ground truth VP.", "seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance > 0: total_error += math.log(distance) return", "detected_vps): \"\"\"Measures average error in the location of detected vanishing", "image's x-axis, and normalized by image height. Args: ground_truth_horizon: Tuple", "when there are too many VPs, negative when there are", "line, within the image's x-axis, and normalized by image height.", "truth vp. Args: ground_truth_vps: List of ground truth VP point", "dt_vp in detected_vps: distance = geom_tools.point_to_point_dist(gt_vp, dt_vp) point_pair_dists.append((distance, gt_vp, dt_vp))", "geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0], dt_vp[1])) angle_diff = 180 - abs(abs(gt_angle", "for gt_vp in ground_truth_vps: for dt_vp in detected_vps: distance =", "Tuple with (slope, intercept) for the detected horizon line. image_dims:", "return None def gt(x): return ground_truth_horizon[0] * x + ground_truth_horizon[1]", "if distance > 0: total_error += math.log(distance) return total_error /", "missing altogether. 
\"\"\" if ground_truth_horizon is None or detected_horizon is", "2) point_pair_dists = [] for gt_vp in ground_truth_vps: for dt_vp", "None or detected_horizon is None: return None def gt(x): return", "dt(0)), abs(gt(width) - dt(width))) / height def vp_direction_error(ground_truth_vps, detected_vps, image_dims):", "truth VP point tuples. detected_vps: List of detected VP point", "in point_pair_dists: if gt_vp in gt_vp_to_error or dt_vp in seen_dt_vps:", "and normalized by image height. Args: ground_truth_horizon: Tuple with (slope,", "is None: return None def gt(x): return ground_truth_horizon[0] * x", "detected_vps: gt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])) dt_angle =", "detected VP is matched with its closest unclaimed ground truth", "in point_pair_dists: if gt_vp in seen_gt_vps or dt_vp in seen_dt_vps:", "distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None) for gt in ground_truth_vps] def", "point tuples. detected_vps: List of detected VP point tuples. Returns:", "detected VP point tuples. Returns: Float, error. \"\"\" if len(ground_truth_vps)", "line and the ground truth horizon line, within the image's", "height = image_dims return max(abs(gt(0) - dt(0)), abs(gt(width) - dt(width)))", "horizon is missing altogether. \"\"\" if ground_truth_horizon is None or", "from ground truth vp. Args: ground_truth_vps: List of ground truth", "return 0 point_pair_dists = [] for gt_vp in ground_truth_vps: for", "for gt_vp in ground_truth_vps: for dt_vp in detected_vps: gt_angle =", "for missing VPs. \"\"\" principal_point = (image_dims[0] // 2, image_dims[1]", "image_dims return max(abs(gt(0) - dt(0)), abs(gt(width) - dt(width))) / height", "tuples. Returns: Float, error. 
\"\"\" if len(ground_truth_vps) == 0 or", "= 0 for distance, gt_vp, dt_vp in point_pair_dists: if gt_vp", "None) for gt in ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average", "seen_dt_vps: continue gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp) return [gt_vp_to_error.get(gt, None) for", "width, height = image_dims return max(abs(gt(0) - dt(0)), abs(gt(width) -", "are too many VPs, negative when there are too few.", "for the GT horizon line. detected_horizon: Tuple with (slope, intercept)", "def horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates error in a detected horizon.", "None: return None def gt(x): return ground_truth_horizon[0] * x +", "point tuples. Returns: Float, error. \"\"\" if len(ground_truth_vps) == 0", "unclaimed ground truth VP. Args: ground_truth_vps: List of ground truth", "0: total_error += math.log(distance) return total_error / min(len(detected_vps), len(ground_truth_vps)) def", "= (image_dims[0] // 2, image_dims[1] // 2) point_pair_dists = []", "List with float degrees of error for each ground truth", "Float, error. \"\"\" if len(ground_truth_vps) == 0 or len(detected_vps) ==", "vanishing points. Returns: Integer, positive when there are too many", "height) of the image, in pixels. Returns: List with float", "ground truth vp. Args: ground_truth_vps: List of ground truth VP", "[gt_vp_to_error.get(gt, None) for gt in ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures", "or \"extra\" VPs do not count against the score. Based", "[] for gt_vp in ground_truth_vps: for dt_vp in detected_vps: gt_angle", "ground_truth_horizon is None or detected_horizon is None: return None def", "truth horizon line, within the image's x-axis, and normalized by", "angle_diff = 180 - abs(abs(gt_angle - dt_angle) - 180) point_pair_dists.append((angle_diff,", "vp from ground truth vp. 
Args: ground_truth_vps: List of ground", "ground truth VP point tuples. detected_vps: List of detected VP", "dt_vp in seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance > 0:", "for distance, gt_vp, dt_vp in point_pair_dists: if gt_vp in seen_gt_vps", "detected vanishing points. Returns: Integer, positive when there are too", "integers, (width, height) of the image, in pixels. Returns: List", "point_pair_dists: if gt_vp in gt_vp_to_error or dt_vp in seen_dt_vps: continue", "point tuples. image_dims: Tuple of integers, (width, height) of the", "do not count against the score. Based on log distance", "for gt in ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average error", "point_pair_dists.append((distance, gt_vp, dt_vp)) sorted(point_pair_dists, key=lambda k: k[0]) seen_gt_vps = set()", "and the ground truth horizon line, within the image's x-axis,", "is None or detected_horizon is None: return None def gt(x):", "set() seen_dt_vps = set() total_error = 0 for distance, gt_vp,", "horizon line. image_dims: Tuple of integers, (width, height) of the", "principal_point[1], gt_vp[0], gt_vp[1])) dt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1], dt_vp[0], dt_vp[1]))", "error in direction from center of detected vanishing points. Each", "principal_point[0], principal_point[1], dt_vp[0], dt_vp[1])) angle_diff = 180 - abs(abs(gt_angle -", "horizon line and the ground truth horizon line, within the", "or dt_vp in seen_dt_vps: continue gt_vp_to_error[gt_vp] = distance seen_dt_vps.add(dt_vp) return", "image_dims[1] // 2) point_pair_dists = [] for gt_vp in ground_truth_vps:", "with its closest unclaimed ground truth VP. 
Args: ground_truth_vps: List", "in seen_dt_vps: continue seen_gt_vps.add(gt_vp) seen_dt_vps.add(dt_vp) if distance > 0: total_error", "geom_tools.get_line_angle(( principal_point[0], principal_point[1], gt_vp[0], gt_vp[1])) dt_angle = geom_tools.get_line_angle(( principal_point[0], principal_point[1],", "= [] for gt_vp in ground_truth_vps: for dt_vp in detected_vps:", "geom_tools def horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates error in a detected", "direction from center of detected vanishing points. Each detected VP", "gt in ground_truth_vps] def location_accuracy_error(ground_truth_vps, detected_vps): \"\"\"Measures average error in", "integers, (width, height) of the image, in pixels. Returns: Float,", "from vp import geom_tools def horizon_error(ground_truth_horizon, detected_horizon, image_dims): \"\"\"Calculates error" ]
[ "replace=False) args.task_id = int(task_list[proc_id()]) _robots = [\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"]", "type=int, default=-1) parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l', type=int,", "default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init', type=float, default=0.) parser.add_argument('--controller', type=str, default=\"joint\")", "str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list)) return args def main(): torch.backends.cudnn.deterministic =", "= setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint = None if args.load_dir is", "exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())), 'w') as f: json.dump(args.__dict__, f,", "torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make( args.robot, args.object, args.obstacle, args.task, args.controller,", "parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id', type=int, default=-1)", "type=str, default='ppo') parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr', type=float,", "type=int, default=2) parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed', '-s', type=int, default=4) parser.add_argument('--cpu',", "\"_name:\" + args.exp_name + \"_robot:\" + str(args.robot) + \"_task:\" +", "default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task', type=str, default=\"PickPlace\")", "np.random.choice(256, num_procs(), replace=False) args.task_id = 
int(task_list[proc_id()]) _robots = [\"IIWA\", \"Jaco\",", "_objects = [\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives = [\"PickPlace\", \"Push\",", "ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters,", "target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True, checkpoint=checkpoint) if __name__ == '__main__':", "default=128) parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init', type=float, default=0.)", "type=float, default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters', type=int,", "task_list = np.random.choice(256, num_procs(), replace=False) args.task_id = int(task_list[proc_id()]) _robots =", "setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint = None if args.load_dir is not", "composition import os import json import torch from spinup.algos.pytorch.ppo.core import", "default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters', type=int, default=128)", "np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives), len(_obstacles))) args.robot = _robots[idx[0]] args.object =", "+ \"_obstacle:\" + str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list)) return args def", "None: checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make( args.robot, args.object,", "len(_objects), len(_objectives), len(_obstacles))) args.robot = _robots[idx[0]] args.object = _objects[idx[1]] args.task", 
"args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip,", "_robots = [\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"] _objects = [\"Box\", \"Dumbbell\",", "default=4) parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name', type=str, default='ppo')", "parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters',", "= parser.parse_args() np.random.seed(args.seed) task_list = np.random.choice(256, num_procs(), replace=False) args.task_id =", "type=int, default=-1) parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma', type=float,", "default=4) parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs', type=int, default=625)", "parser.add_argument('--vf-iters', type=int, default=128) parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init',", "torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False torch.set_num_threads(1) args = parse_args()", "default=\"PickPlace\") parser.add_argument('--horizon', type=int, default=500) args = parser.parse_args() np.random.seed(args.seed) task_list =", "indent=2) logger_kwargs = setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint = None if", "type=str, default=None) parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int, default=500) args =", "import MLPActorCritic from 
spinup.algos.pytorch.ppo.ppo import ppo from spinup.utils.run_utils import setup_logger_kwargs", "epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef,", "type=int, default=4) parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs', type=int,", "parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle',", "parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma',", "\"Push\", \"Shelf\", \"Trashcan\"] _obstacles = [\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx", "parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task',", "args = parser.parse_args() np.random.seed(args.seed) task_list = np.random.choice(256, num_procs(), replace=False) args.task_id", "args.controller, args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs,", "+ str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list)) return args def main(): torch.backends.cudnn.deterministic", "args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())), 'w') as f: json.dump(args.__dict__,", "import ppo from spinup.utils.run_utils import setup_logger_kwargs from 
spinup.utils.mpi_tools import proc_id,", "parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init', type=float, default=0.) parser.add_argument('--controller',", "type=int, default=625) parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr', type=float,", "True torch.backends.cudnn.benchmark = False torch.set_num_threads(1) args = parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name),", "# args.exp_name = \"t:\" + str(args.task_id) + \"_name:\" + args.exp_name", "default=1e-4) parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters', type=int, default=128) parser.add_argument('--target-kl', type=float, default=0.02)", "str(args.object) + \"_obstacle:\" + str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list)) return args", "+ \"_object:\" + str(args.object) + \"_obstacle:\" + str(args.obstacle) args.exp_name =", "argparse import composition import os import json import torch from", "proc_id, num_procs def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir',", "composition.make( args.robot, args.object, args.obstacle, args.task, args.controller, args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l,", "np.random.seed(args.seed) task_list = np.random.choice(256, num_procs(), replace=False) args.task_id = int(task_list[proc_id()]) _robots", "parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init', type=float, default=0.) 
parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot',", "parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int,", "\"_object:\" + str(args.object) + \"_obstacle:\" + str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list))", "args.object, args.obstacle, args.task, args.controller, args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed,", "type=int, default=16000) parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip', type=float,", "f: json.dump(args.__dict__, f, indent=2) logger_kwargs = setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint", "'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make( args.robot, args.object, args.obstacle, args.task, args.controller, args.horizon,", "return args def main(): torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False", "data_dir=args.data_dir) checkpoint = None if args.load_dir is not None: checkpoint", "parser.add_argument('--log-std-init', type=float, default=0.) 
parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object',", "import composition import os import json import torch from spinup.algos.pytorch.ppo.core", "default=None) parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid', type=int, default=256)", "_obstacles[idx[3]] # args.exp_name = \"t:\" + str(args.task_id) + \"_name:\" +", "= int(task_list[proc_id()]) _robots = [\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"] _objects =", "parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters', type=int, default=128) parser.add_argument('--target-kl',", "\"ObjectWall\"] idx = np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives), len(_obstacles))) args.robot =", "import proc_id, num_procs def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs')", "\"Trashcan\"] _obstacles = [\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx = np.unravel_index(args.task_id,", "None if args.load_dir is not None: checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save',", "default=-1) parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma', type=float, default=0.99)", "type=float, default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters', type=int,", "torch from spinup.algos.pytorch.ppo.core import MLPActorCritic from spinup.algos.pytorch.ppo.ppo import ppo from", "MLPActorCritic from spinup.algos.pytorch.ppo.ppo import ppo from spinup.utils.run_utils import setup_logger_kwargs from", "with open(os.path.join(args.data_dir, args.exp_name, 
'args_{}.json'.format(proc_id())), 'w') as f: json.dump(args.__dict__, f, indent=2)", "args.robot, args.object, args.obstacle, args.task, args.controller, args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init),", "[\"PickPlace\", \"Push\", \"Shelf\", \"Trashcan\"] _obstacles = [\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"]", "args.obstacle = _obstacles[idx[3]] # args.exp_name = \"t:\" + str(args.task_id) +", "type=int, default=128) parser.add_argument('--vf-iters', type=int, default=128) parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef', type=float,", "\"Plate\", \"Hollowbox\"] _objectives = [\"PickPlace\", \"Push\", \"Shelf\", \"Trashcan\"] _obstacles =", "_objectives[idx[2]] args.obstacle = _obstacles[idx[3]] # args.exp_name = \"t:\" + str(args.task_id)", "default=0.02) parser.add_argument('--log-std-init', type=float, default=0.) parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\")", "= True torch.backends.cudnn.benchmark = False torch.set_num_threads(1) args = parse_args() os.makedirs(os.path.join(args.data_dir,", "= _objects[idx[1]] args.task = _objectives[idx[2]] args.obstacle = _obstacles[idx[3]] # args.exp_name", "numpy as np import argparse import composition import os import", "'-s', type=int, default=4) parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs',", "parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters', type=int, default=128) parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef',", "= np.random.choice(256, num_procs(), replace=False) args.task_id = int(task_list[proc_id()]) _robots = [\"IIWA\",", "default=128) parser.add_argument('--vf-iters', type=int, default=128) 
parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02)", "= np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives), len(_obstacles))) args.robot = _robots[idx[0]] args.object", "\"Hollowbox\"] _objectives = [\"PickPlace\", \"Push\", \"Shelf\", \"Trashcan\"] _obstacles = [\"None\",", "+ str(args.robot) + \"_task:\" + str(args.task) + \"_object:\" + str(args.object)", "\"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives = [\"PickPlace\", \"Push\", \"Shelf\", \"Trashcan\"] _obstacles", "logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True, checkpoint=checkpoint) if __name__ == '__main__': main()", "args.exp_name, 'args_{}.json'.format(proc_id())), 'w') as f: json.dump(args.__dict__, f, indent=2) logger_kwargs =", "import json import torch from spinup.algos.pytorch.ppo.core import MLPActorCritic from spinup.algos.pytorch.ppo.ppo", "logger_kwargs = setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint = None if args.load_dir", "\"ObjectDoor\", \"ObjectWall\"] idx = np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives), len(_obstacles))) args.robot", "args.exp_name = \"t:\" + str(args.task_id) + \"_name:\" + args.exp_name +", "train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True, checkpoint=checkpoint) if __name__", "= False torch.set_num_threads(1) args = parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with", "args.load_dir is not None: checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda:", "from spinup.algos.pytorch.ppo.ppo import ppo from spinup.utils.run_utils import setup_logger_kwargs from spinup.utils.mpi_tools", "parser = argparse.ArgumentParser() parser.add_argument('--data-dir', 
default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int, default=-1)", "= _obstacles[idx[3]] # args.exp_name = \"t:\" + str(args.task_id) + \"_name:\"", "parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip',", "parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon',", "len(_objectives), len(_obstacles))) args.robot = _robots[idx[0]] args.object = _objects[idx[1]] args.task =", "= [\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"] _objects = [\"Box\", \"Dumbbell\", \"Plate\",", "type=float, default=0.02) parser.add_argument('--log-std-init', type=float, default=0.) parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot', type=str,", "parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed', '-s', type=int, default=4)", "= _robots[idx[0]] args.object = _objects[idx[1]] args.task = _objectives[idx[2]] args.obstacle =", "+ \"_task:\" + str(args.task) + \"_object:\" + str(args.object) + \"_obstacle:\"", "parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid', type=int,", "import argparse import composition import os import json import torch", "as f: json.dump(args.__dict__, f, indent=2) logger_kwargs = setup_logger_kwargs( args.exp_name, data_dir=args.data_dir)", "+ str(args.object) + \"_obstacle:\" + str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list)) return", "ppo from spinup.utils.run_utils import setup_logger_kwargs from 
spinup.utils.mpi_tools import proc_id, num_procs", "np import argparse import composition import os import json import", "= [\"PickPlace\", \"Push\", \"Shelf\", \"Trashcan\"] _obstacles = [\"None\", \"GoalWall\", \"ObjectDoor\",", "checkpoint = None if args.load_dir is not None: checkpoint =", "default=0.99) parser.add_argument('--seed', '-s', type=int, default=4) parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps', type=int,", "parser.add_argument('--seed', '-s', type=int, default=4) parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps', type=int, default=16000)", "args.robot = _robots[idx[0]] args.object = _objects[idx[1]] args.task = _objectives[idx[2]] args.obstacle", "idx = np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives), len(_obstacles))) args.robot = _robots[idx[0]]", "default=2) parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed', '-s', type=int, default=4) parser.add_argument('--cpu', type=int,", "str(args.robot) + \"_task:\" + str(args.task) + \"_object:\" + str(args.object) +", "'MTL_{}'.format(len(task_list)) return args def main(): torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark =", "parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr',", "num_procs def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir', default=None)", "_robots[idx[0]] args.object = _objects[idx[1]] args.task = _objectives[idx[2]] args.obstacle = _obstacles[idx[3]]", "= None if args.load_dir is not None: checkpoint = torch.load(os.path.join(args.load_dir,", "import torch from spinup.algos.pytorch.ppo.core import MLPActorCritic from spinup.algos.pytorch.ppo.ppo import ppo", "= \"t:\" + str(args.task_id) 
+ \"_name:\" + args.exp_name + \"_robot:\"", "[\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx = np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives),", "ppo(lambda: composition.make( args.robot, args.object, args.obstacle, args.task, args.controller, args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic,", "\"Shelf\", \"Trashcan\"] _obstacles = [\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx =", "main(): torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False torch.set_num_threads(1) args =", "default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None)", "\"Panda\"] _objects = [\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives = [\"PickPlace\",", "import setup_logger_kwargs from spinup.utils.mpi_tools import proc_id, num_procs def parse_args(): parser", "_obstacles = [\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx = np.unravel_index(args.task_id, (len(_robots),", "log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters,", "default=-1) parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l', type=int, default=2)", "\"_robot:\" + str(args.robot) + \"_task:\" + str(args.task) + \"_object:\" +", "parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())), 'w') as", "type=float, default=0.) 
parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object', type=str,", "type=str, default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str,", "is not None: checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make(", "type=str, default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task', type=str,", "= torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make( args.robot, args.object, args.obstacle, args.task,", "import os import json import torch from spinup.algos.pytorch.ppo.core import MLPActorCritic", "f, indent=2) logger_kwargs = setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint = None", "parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed', '-s', type=int, default=4) parser.add_argument('--cpu', type=int, default=4)", "parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int, default=500) args = parser.parse_args() np.random.seed(args.seed)", "default=625) parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4)", "default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int, default=500)", "type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int, default=500) args = parser.parse_args() np.random.seed(args.seed) task_list", "torch.backends.cudnn.benchmark = False 
torch.set_num_threads(1) args = parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True)", "parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int, default=500) args", "type=float, default=0.99) parser.add_argument('--seed', '-s', type=int, default=4) parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps',", "from spinup.utils.mpi_tools import proc_id, num_procs def parse_args(): parser = argparse.ArgumentParser()", "parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters',", "= _objectives[idx[2]] args.obstacle = _obstacles[idx[3]] # args.exp_name = \"t:\" +", "str(args.task_id) + \"_name:\" + args.exp_name + \"_robot:\" + str(args.robot) +", "parser.parse_args() np.random.seed(args.seed) task_list = np.random.choice(256, num_procs(), replace=False) args.task_id = int(task_list[proc_id()])", "open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())), 'w') as f: json.dump(args.__dict__, f, indent=2) logger_kwargs", "gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs,", "os import json import torch from spinup.algos.pytorch.ppo.core import MLPActorCritic from", "vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True, checkpoint=checkpoint) if", "args.task = _objectives[idx[2]] args.obstacle = _obstacles[idx[3]] # args.exp_name = \"t:\"", "actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, 
log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr,", "default=16000) parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip', type=float, default=0.2)", "type=int, default=128) parser.add_argument('--target-kl', type=float, default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init', type=float,", "spinup.algos.pytorch.ppo.ppo import ppo from spinup.utils.run_utils import setup_logger_kwargs from spinup.utils.mpi_tools import", "setup_logger_kwargs from spinup.utils.mpi_tools import proc_id, num_procs def parse_args(): parser =", "def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id',", "spinup.algos.pytorch.ppo.core import MLPActorCritic from spinup.algos.pytorch.ppo.ppo import ppo from spinup.utils.run_utils import", "+ args.exp_name + \"_robot:\" + str(args.robot) + \"_task:\" + str(args.task)", "torch.set_num_threads(1) args = parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name,", "_objects[idx[1]] args.task = _objectives[idx[2]] args.obstacle = _obstacles[idx[3]] # args.exp_name =", "type=int, default=256) parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed', '-s',", "type=str, default=\"Hollowbox\") parser.add_argument('--obstacle', type=str, default=None) parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int,", "default=None) parser.add_argument('--task', type=str, default=\"PickPlace\") parser.add_argument('--horizon', type=int, 
default=500) args = parser.parse_args()", "False torch.set_num_threads(1) args = parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir,", "args.exp_name = 'MTL_{}'.format(len(task_list)) return args def main(): torch.backends.cudnn.deterministic = True", "default=500) args = parser.parse_args() np.random.seed(args.seed) task_list = np.random.choice(256, num_procs(), replace=False)", "[\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"] _objects = [\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"]", "num_procs(), replace=False) args.task_id = int(task_list[proc_id()]) _robots = [\"IIWA\", \"Jaco\", \"Kinova3\",", "\"Kinova3\", \"Panda\"] _objects = [\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives =", "'args_{}.json'.format(proc_id())), 'w') as f: json.dump(args.__dict__, f, indent=2) logger_kwargs = setup_logger_kwargs(", "spinup.utils.run_utils import setup_logger_kwargs from spinup.utils.mpi_tools import proc_id, num_procs def parse_args():", "default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4) parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters', type=int, default=128)", "args def main(): torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False torch.set_num_threads(1)", "args.exp_name + \"_robot:\" + str(args.robot) + \"_task:\" + str(args.task) +", "+ str(args.task_id) + \"_name:\" + args.exp_name + \"_robot:\" + str(args.robot)", "default='ppo') parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr', type=float, default=1e-4)", "json import torch from spinup.algos.pytorch.ppo.core import MLPActorCritic from spinup.algos.pytorch.ppo.ppo import", "parser.add_argument('--horizon', type=int, default=500) args = parser.parse_args() np.random.seed(args.seed) task_list = np.random.choice(256,", "_objectives = [\"PickPlace\", 
\"Push\", \"Shelf\", \"Trashcan\"] _obstacles = [\"None\", \"GoalWall\",", "parser.add_argument('--cpu', type=int, default=4) parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name',", "use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr,", "\"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx = np.unravel_index(args.task_id, (len(_robots), len(_objects), len(_objectives), len(_obstacles)))", "parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed',", "args = parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())),", "int(task_list[proc_id()]) _robots = [\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"] _objects = [\"Box\",", "+ str(args.task) + \"_object:\" + str(args.object) + \"_obstacle:\" + str(args.obstacle)", "= 'MTL_{}'.format(len(task_list)) return args def main(): torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark", "= parse_args() os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())), 'w')", "'w') as f: json.dump(args.__dict__, f, indent=2) logger_kwargs = setup_logger_kwargs( args.exp_name,", "args.task_id = int(task_list[proc_id()]) _robots = [\"IIWA\", \"Jaco\", \"Kinova3\", \"Panda\"] _objects", "os.makedirs(os.path.join(args.data_dir, args.exp_name), exist_ok=True) with open(os.path.join(args.data_dir, args.exp_name, 'args_{}.json'.format(proc_id())), 'w') as f:", "args.obstacle, args.task, args.controller, 
args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma,", "args.object = _objects[idx[1]] args.task = _objectives[idx[2]] args.obstacle = _obstacles[idx[3]] #", "type=int, default=500) args = parser.parse_args() np.random.seed(args.seed) task_list = np.random.choice(256, num_procs(),", "from spinup.utils.run_utils import setup_logger_kwargs from spinup.utils.mpi_tools import proc_id, num_procs def", "+ \"_name:\" + args.exp_name + \"_robot:\" + str(args.robot) + \"_task:\"", "json.dump(args.__dict__, f, indent=2) logger_kwargs = setup_logger_kwargs( args.exp_name, data_dir=args.data_dir) checkpoint =", "if args.load_dir is not None: checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt'))", "default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid',", "type=float, default=1e-4) parser.add_argument('--pi-iters', type=int, default=128) parser.add_argument('--vf-iters', type=int, default=128) parser.add_argument('--target-kl', type=float,", "\"_task:\" + str(args.task) + \"_object:\" + str(args.object) + \"_obstacle:\" +", "type=float, default=0.02) parser.add_argument('--ent-coef', type=float, default=0.02) parser.add_argument('--log-std-init', type=float, default=0.) 
parser.add_argument('--controller', type=str,", "train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True, checkpoint=checkpoint) if __name__ ==", "argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id', type=int,", "not None: checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make( args.robot,", "(len(_robots), len(_objects), len(_objectives), len(_obstacles))) args.robot = _robots[idx[0]] args.object = _objects[idx[1]]", "'state_dicts.pt')) ppo(lambda: composition.make( args.robot, args.object, args.obstacle, args.task, args.controller, args.horizon, use_task_id_obs=True),", "= [\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives = [\"PickPlace\", \"Push\", \"Shelf\",", "as np import argparse import composition import os import json", "\"t:\" + str(args.task_id) + \"_name:\" + args.exp_name + \"_robot:\" +", "pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True, checkpoint=checkpoint)", "args.task, args.controller, args.horizon, use_task_id_obs=True), actor_critic=MLPActorCritic, ac_kwargs=dict(hidden_sizes=[args.hid]*args.l, log_std_init=args.log_std_init), seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps,", "\"Jaco\", \"Kinova3\", \"Panda\"] _objects = [\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives", "[\"Box\", \"Dumbbell\", \"Plate\", \"Hollowbox\"] _objectives = [\"PickPlace\", \"Push\", \"Shelf\", \"Trashcan\"]", "default=0.) 
parser.add_argument('--controller', type=str, default=\"joint\") parser.add_argument('--robot', type=str, default=\"IIWA\") parser.add_argument('--object', type=str, default=\"Hollowbox\")", "parser.add_argument('--exp-name', type=str, default='ppo') parser.add_argument('--clip', type=float, default=0.2) parser.add_argument('--pi-lr', type=float, default=1e-4) parser.add_argument('--vf-lr',", "str(args.task) + \"_object:\" + str(args.object) + \"_obstacle:\" + str(args.obstacle) args.exp_name", "def main(): torch.backends.cudnn.deterministic = True torch.backends.cudnn.benchmark = False torch.set_num_threads(1) args", "args.exp_name, data_dir=args.data_dir) checkpoint = None if args.load_dir is not None:", "\"_obstacle:\" + str(args.obstacle) args.exp_name = 'MTL_{}'.format(len(task_list)) return args def main():", "parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id', type=int, default=-1) parser.add_argument('--hid', type=int, default=256) parser.add_argument('--l',", "spinup.utils.mpi_tools import proc_id, num_procs def parse_args(): parser = argparse.ArgumentParser() parser.add_argument('--data-dir',", "type=int, default=4) parser.add_argument('--steps', type=int, default=16000) parser.add_argument('--epochs', type=int, default=625) parser.add_argument('--exp-name', type=str,", "steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon,", "from spinup.algos.pytorch.ppo.core import MLPActorCritic from spinup.algos.pytorch.ppo.ppo import ppo from spinup.utils.run_utils", "len(_obstacles))) args.robot = _robots[idx[0]] args.object = _objects[idx[1]] args.task = _objectives[idx[2]]", "seed=args.seed, gamma=args.gamma, steps_per_epoch=args.steps, epochs=args.epochs, clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, 
train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl,", "clip_ratio=args.clip, pi_lr=args.pi_lr, vf_lr=args.vf_lr, train_pi_iters=args.pi_iters, train_v_iters=args.vf_iters, target_kl=args.target_kl, logger_kwargs=logger_kwargs, max_ep_len=args.horizon, ent_coef=args.ent_coef, log_per_proc=True,", "checkpoint = torch.load(os.path.join(args.load_dir, 'pyt_save', 'state_dicts.pt')) ppo(lambda: composition.make( args.robot, args.object, args.obstacle,", "import numpy as np import argparse import composition import os", "= argparse.ArgumentParser() parser.add_argument('--data-dir', default='spinningup_training/logs') parser.add_argument('--load-dir', default=None) parser.add_argument('--gridsearch-id', type=int, default=-1) parser.add_argument('--task-id',", "+ \"_robot:\" + str(args.robot) + \"_task:\" + str(args.task) + \"_object:\"", "default=256) parser.add_argument('--l', type=int, default=2) parser.add_argument('--gamma', type=float, default=0.99) parser.add_argument('--seed', '-s', type=int,", "= [\"None\", \"GoalWall\", \"ObjectDoor\", \"ObjectWall\"] idx = np.unravel_index(args.task_id, (len(_robots), len(_objects)," ]
[ "f(): yield 1 g = f() res = g.next() assert", "object\") def test_throw_finishes_generator(self): def f(): yield 1 g = f()", "def f(): try: yield 1 v = (yield 2) except:", "set() def test_explicit_stop_iteration_unpackiterable(self): def f(): yield 1 raise StopIteration assert", "assert d['f'].x == 42 def test_generator_raises_typeerror(self): def f(): yield 1", "42 g = mygen() raises(TypeError, g.send, 2) raises(TypeError, g.send, 2)", "g.next) assert not g.gi_running assert g.gi_frame is None assert g.gi_code", "i in range(5))\" assert res == 30 def test_generator_expression_2(self): d", "g.next) def test_throw5(self): def f(): try: yield 1 except: x", "test_generator_restart(self): def g(): i = me.next() yield i me =", "g.gi_frame is not None assert not g.gi_running g.next() assert not", "g = f() g.next() # String exceptions are not allowed", "1 def test_generator2(self): def f(): yield 1 g = f()", "not g.gi_running assert g.gi_frame is None assert g.gi_code is f.__code__", "3)) assert set(g) == set([0, 1, 4, 9, 16, 25])", "allowed anymore raises(TypeError, g.throw, \"Error\") assert g.throw(Exception) == 3 raises(StopIteration,", "def test_generator_raises_typeerror(self): def f(): yield 1 g = f() raises(TypeError,", "g.throw, ValueError) assert g.gi_frame is None def test_throw_bug(self): def f():", "g.next() == 1 raises(StopIteration, g.next) def test_attributes(self): def f(): yield", "yield 1 g = f() assert g.next() == 1 raises(StopIteration,", "= sum(i*i for i in range(5))\" assert res == 30", "except NameError: yield 5 raise # should raise \"no active", "return total, x z = [1, 2, 7] res =", "def test_repr(self): def myFunc(): yield 1 g = myFunc() r", "= d['g'] assert g.next() == 1 assert g.next() == 2", "String exceptions are not allowed anymore raises(TypeError, g.throw, \"Error\") assert", "del g import gc gc.collect() assert d['f'].x == 42 def", "raises(NameError, g.throw, NameError, \"Error\", None) def test_throw_fail(self): def f(): yield", 
"sum(i*i for i in range(5))\" assert res == 30 def", "except: yield 3 g = f() assert g.next() == 1", "g = d['f']() g.next() del g import gc gc.collect() assert", "need to exec it, else it won't run on python2.4", "= f() assert g.gi_frame is not None raises(ValueError, g.throw, ValueError)", "set(g) == set([0, 1, 4, 9, 16, 25]) assert set(g)", "== [1] def test_throw_on_finished_generator(self): def f(): yield 1 g =", "def f(): yield 1 g = f() assert [x for", "g = f() assert g.next() == 1 raises(StopIteration, g.next) def", "= f() g.next() \"\"\" in d g = d['g'] assert", "== 3 raises(StopIteration, g.next) def test_throw4(self): d = {} exec", "import gc gc.collect() assert d['f'].x == 42 def test_generator_raises_typeerror(self): def", "def f(): try: yield finally: f.x = 42 \"\"\".strip() in", "def f(): yield 1 g = f() raises(TypeError, g.send) #", "yield 1 yield 2 except: yield 3 g = f()", "me.next) def test_generator_expression(self): exec \"res = sum(i*i for i in", "assert g.next() == 2 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next)", "i in [x for x in z]) return total, x", "in d assert d['res'] == (10, 7) def test_repr(self): def", "1 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw4(self): d", "it, else it won't run on python2.4 d = {}", "test_throw_fail3(self): def f(): yield 1 g = f() raises(TypeError, g.throw,", "[x for x in g] == [1] def test_generator_restart(self): def", "= 3 try: yield x except: pass g = f()", "assert tuple(f()) == (1,) def test_exception_is_cleared_by_yield(self): def f(): try: foobar", "2 g = f() g.next() raises(RuntimeError, g.close) def test_close_on_collect(self): ##", "it.next() g = f() assert [x for x in g]", "1 except GeneratorExit: raise NameError g = f() g.next() raises(NameError,", "{} exec \"\"\"if 1: def f(): v = (yield )", "x + 5 yield x + 6 return g.__code__ ''')", "g.close() is None def test_close2(self): def f(): try: yield 1", "f() g.next() assert g.close() 
is None def test_close3(self): def f():", "gc gc.collect() assert d['f'].x == 42 def test_generator_raises_typeerror(self): def f():", "g.send) # one argument required raises(TypeError, g.send, 1) # not", "argument required raises(TypeError, g.send, 1) # not started, must send", "g = f() raises(TypeError, g.throw, list()) def test_throw_fail3(self): def f():", "sum(i for i in [x for x in z]) return", "assert g.next() == 1 raises(StopIteration, g.next) def test_attributes(self): def f():", "g.gi_frame is not None raises(ValueError, g.throw, ValueError) assert g.gi_frame is", "g.__name__ == 'f' def test_generator3(self): def f(): yield 1 g", "= f() res = g.next() assert res == 1 raises(StopIteration,", "\"res = sum(i*i for i in range(5))\" assert res ==", "space.appexec([], '''(): def g(x): yield x + 5 yield x", "== (1,) def test_exception_is_cleared_by_yield(self): def f(): try: foobar except NameError:", "g = f() raises(TypeError, g.throw, NameError(\"Error\"), \"error\") def test_throw_fail2(self): def", "yield 1 except GeneratorExit: raise StopIteration g = f() g.next()", "GeneratorExit: raise StopIteration g = f() g.next() assert g.close() is", "except GeneratorExit: yield 2 g = f() g.next() raises(RuntimeError, g.close)", "[1] def test_generator_restart(self): def g(): i = me.next() yield i", "GeneratorExit: yield 2 g = f() g.next() raises(RuntimeError, g.close) def", "object myFunc at 0x\") assert list(g) == [1] assert repr(g)", "2 g = f() raises(NameError, g.throw, NameError, \"Error\", None) def", "raises(NameError, g.close) def test_close_fail(self): def f(): try: yield 1 except", "g = f() assert g.gi_frame is not None raises(ValueError, g.throw,", "yield 1 v = (yield 2) except: yield 3 g", "g = f() res = g.next() assert res == 1", "is f.__code__ assert g.__name__ == 'f' assert g.gi_frame is not", "try: yield x except: pass g = f() g.next() #", "AppTestGenerator: def test_generator(self): def f(): yield 1 assert f().next() ==", "yield 1 g = f() 
raises(TypeError, g.throw, NameError(\"Error\"), \"error\") def", "repr(g) assert r.startswith(\"<generator object myFunc at 0x\") assert list(g) ==", "won't run on python2.4 d = {} exec \"\"\" def", "in [x for x in z]) return total, x z", "yield 1 g = myFunc() r = repr(g) assert r.startswith(\"<generator", "yield x + 5 return g.__code__ ''') assert should_not_inline(w_co) ==", "list(g) == [1] def test_generator4(self): def f(): yield 1 g", "yield 1 g = f() assert [x for x in", "NameError(\"Error\"), \"error\") def test_throw_fail2(self): def f(): yield 1 g =", "assert not g.gi_running raises(StopIteration, g.next) assert not g.gi_running assert g.gi_frame", "def test_generator_propagate_stopiteration(self): def f(): it = iter([1]) while 1: yield", "= f() raises(TypeError, g.throw, list()) def test_throw_fail3(self): def f(): yield", "[1] def test_generator4(self): def f(): yield 1 g = f()", "g = f() raises(TypeError, g.throw, NameError(\"Error\"), None, \"not tb object\")", "f() g.next() raises(NameError, g.close) def test_close_fail(self): def f(): try: yield", "[x for x in g] == [1] def test_generator_propagate_stopiteration(self): def", "f() assert g.gi_code is f.__code__ assert g.__name__ == 'f' assert", "== set() assert set(i for i in range(0)) == set()", "= sum(i for i in [x for x in z])", "in z]) return total, x z = [1, 2, 7]", "d g = d['g'] assert g.send(42) == 42 def test_throw1(self):", "f() # two arguments version raises(NameError, g.throw, NameError, \"Error\") def", "yield 5 raise # should raise \"no active exception to", "test_should_not_inline(space): from pypy.interpreter.generator import should_not_inline w_co = space.appexec([], '''(): def", ") yield v g = f() g.next() \"\"\" in d", "is None def test_throw_bug(self): def f(): try: x.throw(IndexError) # =>", "g.__code__ ''') assert should_not_inline(w_co) == False w_co = space.appexec([], '''():", "d g = d['f']() g.next() del g import gc gc.collect()", "def test_throw3(self): def f(): try: yield 1 
yield 2 except:", "g = f() raises(NameError, g.throw, NameError, \"Error\", None) def test_throw_fail(self):", "d = {} exec \"\"\"if 1: def f(): try: yield", "g.next() == 1 assert g.next() == 2 assert g.throw(NameError(\"Error\")) ==", "f(): try: yield 1 except GeneratorExit: raise NameError g =", "except TypeError: pass def test_multiple_invalid_sends(self): def mygen(): yield 42 g", "def test_generator_explicit_stopiteration(self): def f(): yield 1 raise StopIteration g =", "f(): try: yield 1 v = (yield 2) except: yield", "are not allowed anymore raises(TypeError, g.throw, \"Error\") assert g.throw(Exception) ==", "in d g = d['g'] assert g.next() == 1 assert", "g = (i*i for i in range(-5, 3)) assert set(g)", "at 0x\") assert list(g) == [1] assert repr(g) == r", "finally: f.x = 42 \"\"\".strip() in d g = d['f']()", "import should_not_inline w_co = space.appexec([], '''(): def g(x): yield x", "assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw5(self): def f():", "d['f'].x == 42 def test_generator_raises_typeerror(self): def f(): yield 1 g", "def f(): yield 1 g = f() raises(TypeError, g.throw, NameError(\"Error\"),", "[1] def test_generator_propagate_stopiteration(self): def f(): it = iter([1]) while 1:", "g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw4(self): d = {}", "25]) assert set(g) == set() assert set(i for i in", "1) # not started, must send None def test_generator_explicit_stopiteration(self): def", "def f(): it = iter([1]) while 1: yield it.next() g", "test_throw1(self): def f(): yield 2 g = f() # two", "1 g = f() raises(TypeError, g.send) # one argument required", "= f() assert g.next() == 1 raises(StopIteration, g.next) def test_attributes(self):", "for x in g] == [1] def test_generator5(self): d =", "i me = g() raises(ValueError, me.next) def test_generator_expression(self): exec \"res", "for i in range(5))\" assert res == 30 def test_generator_expression_2(self):", "g] == [1] def 
test_generator_propagate_stopiteration(self): def f(): it = iter([1])", "single argument version raises(NameError, g.throw, NameError(\"Error\")) def test_throw3(self): def f():", "g.throw, list()) def test_throw_fail3(self): def f(): yield 1 g =", "= {} exec \"\"\"if 1: def f(): try: yield 1", "not None assert not g.gi_running g.next() assert not g.gi_running raises(StopIteration,", "NameError) def test_close(self): def f(): yield 1 g = f()", "test_throw_finishes_generator(self): def f(): yield 1 g = f() assert g.gi_frame", "test_close(self): def f(): yield 1 g = f() assert g.close()", "assert r.startswith(\"<generator object myFunc at 0x\") assert list(g) == [1]", "def f(): try: yield 1 except GeneratorExit: raise NameError g", "= [1, 2, 7] res = f() \"\"\" in d", "= {} exec \"\"\"if 1: def f(): v = (yield", "\"\"\"if 1: def f(): try: yield 1 v = (yield", "raises(NameError, g.throw, NameError, \"Error\") def test_throw2(self): def f(): yield 2", "None raises(ValueError, g.throw, ValueError) assert g.gi_frame is None def test_throw_bug(self):", "5 yield x + 6 return g.__code__ ''') assert should_not_inline(w_co)", "assert [x for x in g] == [1] def test_generator5(self):", "TypeError: pass def test_multiple_invalid_sends(self): def mygen(): yield 42 g =", "== 3 raises(StopIteration, g.next) def test_throw5(self): def f(): try: yield", "argument version raises(NameError, g.throw, NameError(\"Error\")) def test_throw3(self): def f(): try:", "(10, 7) def test_repr(self): def myFunc(): yield 1 g =", "g = f() g.next() assert g.close() is None def test_close3(self):", "next(gen) # --> 5 try: next(gen) except TypeError: pass def", "g.throw(Exception) == 3 raises(StopIteration, g.throw, Exception) def test_throw6(self): def f():", "g.next() raises(RuntimeError, g.close) def test_close_on_collect(self): ## we need to exec", "is f.__code__ assert g.__name__ == 'f' def test_generator3(self): def f():", "def test_generator2(self): def f(): yield 1 g = f() assert", "= (yield ) 
yield v g = f() g.next() \"\"\"", "<filename>pypy/interpreter/test/test_generator.py class AppTestGenerator: def test_generator(self): def f(): yield 1 assert", "assert [x for x in g] == [1] def test_generator_restart(self):", "test_generator3(self): def f(): yield 1 g = f() assert list(g)", "g = f() assert g.next() == 1 assert g.throw(NameError(\"Error\")) ==", "= f() raises(TypeError, g.throw, NameError(\"Error\"), \"error\") def test_throw_fail2(self): def f():", "raise StopIteration g = f() g.next() assert g.close() is None", "= me.next() yield i me = g() raises(ValueError, me.next) def", "== [1] def test_generator4(self): def f(): yield 1 g =", "g.send, 1) # not started, must send None def test_generator_explicit_stopiteration(self):", "test_close_fail(self): def f(): try: yield 1 except GeneratorExit: yield 2", "def myFunc(): yield 1 g = myFunc() r = repr(g)", "assert d['res'] == (10, 7) def test_repr(self): def myFunc(): yield", "is not None raises(ValueError, g.throw, ValueError) assert g.gi_frame is None", "raises(TypeError, g.send, 2) def test_should_not_inline(space): from pypy.interpreter.generator import should_not_inline w_co", "g.next() # String exceptions are not allowed anymore raises(TypeError, g.throw,", "''') assert should_not_inline(w_co) == False w_co = space.appexec([], '''(): def", "g.next() \"\"\" in d g = d['g'] assert g.send(42) ==", "x = f() res = list(x) assert res == [1]", "g.throw, Exception) def test_throw6(self): def f(): yield 2 g =", "g.close() is None def test_close3(self): def f(): try: yield 1", "4, 9, 16, 25]) assert set(g) == set() assert set(i", "= g() raises(ValueError, me.next) def test_generator_expression(self): exec \"res = sum(i*i", "2) def test_should_not_inline(space): from pypy.interpreter.generator import should_not_inline w_co = space.appexec([],", "g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw5(self): def f(): try:", "g.throw, NameError(\"Error\"), \"error\") def 
test_throw_fail2(self): def f(): yield 1 g", "= {} exec \"\"\" def f(): total = sum(i for", "== set() def test_explicit_stop_iteration_unpackiterable(self): def f(): yield 1 raise StopIteration", "g(x): yield x + 5 return g.__code__ ''') assert should_not_inline(w_co)", "f(): try: yield 1 except GeneratorExit: raise StopIteration g =", "is None assert g.gi_code is f.__code__ assert g.__name__ == 'f'", "g.gi_running g.next() assert not g.gi_running raises(StopIteration, g.next) assert not g.gi_running", "g.next() raises(NameError, g.close) def test_close_fail(self): def f(): try: yield 1", "def test_generator3(self): def f(): yield 1 g = f() assert", "\"not tb object\") def test_throw_finishes_generator(self): def f(): yield 1 g", "def g(x): yield x + 5 yield x + 6", "except GeneratorExit: raise NameError g = f() g.next() raises(NameError, g.close)", "== 1 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw4(self):", "assert g.close() is None def test_close3(self): def f(): try: yield", "g] == [1] def test_generator_restart(self): def g(): i = me.next()", "g.send, 2) def test_should_not_inline(space): from pypy.interpreter.generator import should_not_inline w_co =", "x = 3 try: yield x except: pass g =", "5 return g.__code__ ''') assert should_not_inline(w_co) == False w_co =", "test_generator2(self): def f(): yield 1 g = f() assert g.next()", "\"\"\"if 1: def f(): v = (yield ) yield v", "[1] def test_generator5(self): d = {} exec \"\"\"if 1: def", "test_throw4(self): d = {} exec \"\"\"if 1: def f(): try:", "g.next() == 1 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def", "= f() \"\"\" in d g = d['g'] assert g.next()", "anymore raises(TypeError, g.throw, \"Error\") assert g.throw(Exception) == 3 raises(StopIteration, g.throw,", "\"\"\" in d g = d['g'] assert g.next() == 1", "f.__code__ assert g.__name__ == 'f' def test_generator3(self): def f(): yield", "for i in range(0)) == set() def 
test_explicit_stop_iteration_unpackiterable(self): def f():", "yield 1 assert g.gi_running g = f() assert g.gi_code is", "raises(ValueError, g.throw, ValueError) assert g.gi_frame is None def test_throw_bug(self): def", "repr(g) == r def test_unpackiterable_gen(self): g = (i*i for i", "f(): yield 1 g = f() raises(TypeError, g.throw, list()) def", "re-raise\" gen = f() next(gen) # --> 5 try: next(gen)", "yield 1 except GeneratorExit: raise NameError g = f() g.next()", "test_close2(self): def f(): try: yield 1 except GeneratorExit: raise StopIteration", "def f(): try: foobar except NameError: yield 5 raise #", "3 g = f() \"\"\" in d g = d['g']", "f(): try: x.throw(IndexError) # => \"generator already executing\" except ValueError:", "raises(TypeError, g.throw, \"Error\") assert g.throw(Exception) == 3 raises(StopIteration, g.throw, Exception)", "def f(): yield 1 g = f() assert list(g) ==", "set() assert set(i for i in range(0)) == set() def", "== r def test_unpackiterable_gen(self): g = (i*i for i in", "test_generator_explicit_stopiteration(self): def f(): yield 1 raise StopIteration g = f()", "raise NameError g = f() g.next() raises(NameError, g.close) def test_close_fail(self):", "in g] == [1] def test_generator5(self): d = {} exec", "list()) def test_throw_fail3(self): def f(): yield 1 g = f()", "f(): yield 1 g = f() assert g.next() == 1", "g.close) def test_close_on_collect(self): ## we need to exec it, else", "= f() g.next() raises(RuntimeError, g.close) def test_close_on_collect(self): ## we need", "not None raises(ValueError, g.throw, ValueError) assert g.gi_frame is None def", "g = f() assert g.gi_code is f.__code__ assert g.__name__ ==", "test_throw_bug(self): def f(): try: x.throw(IndexError) # => \"generator already executing\"", "g = f() \"\"\" in d g = d['g'] assert", "\"\"\" in d assert d['res'] == (10, 7) def test_repr(self):", "1: yield it.next() g = f() assert [x for x", "g.next) raises(NameError, g.throw, NameError) def test_close(self): def f(): 
yield 1", "def mygen(): yield 42 g = mygen() raises(TypeError, g.send, 2)", "def test_throw5(self): def f(): try: yield 1 except: x =", "test_generator_raises_typeerror(self): def f(): yield 1 g = f() raises(TypeError, g.send)", "yield 1 g = f() raises(TypeError, g.throw, list()) def test_throw_fail3(self):", "d = {} exec \"\"\"if 1: def f(): v =", "test_generator5(self): d = {} exec \"\"\"if 1: def f(): v", "== 1 assert g.next() == 2 assert g.throw(NameError(\"Error\")) == 3", "g.next) def test_attributes(self): def f(): yield 1 assert g.gi_running g", "yield 1 except: x = 3 try: yield x except:", "res = f() \"\"\" in d assert d['res'] == (10,", "assert g.close() is None def test_close2(self): def f(): try: yield", "def test_throw1(self): def f(): yield 2 g = f() #", "test_generator(self): def f(): yield 1 assert f().next() == 1 def", "f(): yield 1 g = f() assert [x for x", "def f(): v = (yield ) yield v g =", "f() g.next() # String exceptions are not allowed anymore raises(TypeError,", "assert set(g) == set([0, 1, 4, 9, 16, 25]) assert", "2 g = f() # two arguments version raises(NameError, g.throw,", "f(): yield 1 g = f() raises(TypeError, g.send) # one", "d g = d['g'] assert g.next() == 1 assert g.next()", "me.next() yield i me = g() raises(ValueError, me.next) def test_generator_expression(self):", "g.gi_running g = f() assert g.gi_code is f.__code__ assert g.__name__", "42 def test_throw1(self): def f(): yield 2 g = f()", "= f() g.next() raises(NameError, g.close) def test_close_fail(self): def f(): try:", "f(): yield 1 raise StopIteration g = f() assert [x", "z]) return total, x z = [1, 2, 7] res", "yield 2 g = f() raises(NameError, g.throw, NameError, \"Error\", None)", "x + 5 return g.__code__ ''') assert should_not_inline(w_co) == False", "1 yield 2 except: yield 3 g = f() assert", "exec it, else it won't run on python2.4 d =", "== 1 def test_generator2(self): def f(): yield 1 g =", "def test_close_on_collect(self): ## we need to exec it, else it", "f() # 
single argument version raises(NameError, g.throw, NameError(\"Error\")) def test_throw3(self):", "assert g.__name__ == 'f' def test_generator3(self): def f(): yield 1", "assert res == 30 def test_generator_expression_2(self): d = {} exec", "should raise \"no active exception to re-raise\" gen = f()", "\"\"\" in d g = d['g'] assert g.send(42) == 42", "raises(ValueError, me.next) def test_generator_expression(self): exec \"res = sum(i*i for i", "yield 42 g = mygen() raises(TypeError, g.send, 2) raises(TypeError, g.send,", "f() g.next() \"\"\" in d g = d['g'] assert g.send(42)", "== set([0, 1, 4, 9, 16, 25]) assert set(g) ==", "2 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw5(self): def", "ValueError: yield 1 x = f() res = list(x) assert", "yield 1 raise StopIteration g = f() assert [x for", "NameError: yield 5 raise # should raise \"no active exception", "== 1 raises(StopIteration, g.next) raises(NameError, g.throw, NameError) def test_close(self): def", "g = d['g'] assert g.send(42) == 42 def test_throw1(self): def", "f(): yield 1 g = f() raises(TypeError, g.throw, NameError(\"Error\"), None,", "try: yield finally: f.x = 42 \"\"\".strip() in d g", "assert g.gi_frame is None def test_throw_bug(self): def f(): try: x.throw(IndexError)", "f(): yield 1 g = f() assert g.close() is None", "7) def test_repr(self): def myFunc(): yield 1 g = myFunc()", "d = {} exec \"\"\" def f(): try: yield finally:", "assert g.gi_frame is not None raises(ValueError, g.throw, ValueError) assert g.gi_frame", "assert repr(g) == r def test_unpackiterable_gen(self): g = (i*i for", "= f() assert [x for x in g] == [1]", "def test_close_fail(self): def f(): try: yield 1 except GeneratorExit: yield", "assert g.gi_code is f.__code__ assert g.__name__ == 'f' assert g.gi_frame", "= f() assert g.next() == 1 assert g.throw(NameError(\"Error\")) == 3", "res == 1 raises(StopIteration, g.next) raises(NameError, g.throw, NameError) def test_close(self):", "None 
assert not g.gi_running g.next() assert not g.gi_running raises(StopIteration, g.next)", "yield finally: f.x = 42 \"\"\".strip() in d g =", "def f(): try: yield 1 yield 2 except: yield 3", "f() assert g.next() == 1 raises(StopIteration, g.next) def test_attributes(self): def", "iter([1]) while 1: yield it.next() g = f() assert [x", "total = sum(i for i in [x for x in", "f(): v = (yield ) yield v g = f()", "g.gi_running assert g.gi_frame is None assert g.gi_code is f.__code__ assert", "yield 2 g = f() g.next() raises(RuntimeError, g.close) def test_close_on_collect(self):", "g = d['g'] assert g.next() == 1 assert g.next() ==", "\"generator already executing\" except ValueError: yield 1 x = f()", "3 try: yield x except: pass g = f() g.next()", "def test_throw_fail3(self): def f(): yield 1 g = f() raises(TypeError,", "g.throw, NameError(\"Error\"), None, \"not tb object\") def test_throw_finishes_generator(self): def f():", "g.next() == 2 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def", "version raises(NameError, g.throw, NameError, \"Error\") def test_throw2(self): def f(): yield", "# should raise \"no active exception to re-raise\" gen =", "should_not_inline(w_co) == False w_co = space.appexec([], '''(): def g(x): yield", "def f(): yield 1 g = f() raises(TypeError, g.throw, list())", "should_not_inline w_co = space.appexec([], '''(): def g(x): yield x +", "send None def test_generator_explicit_stopiteration(self): def f(): yield 1 raise StopIteration", "= f() raises(TypeError, g.throw, NameError(\"Error\"), None, \"not tb object\") def", "None assert g.gi_code is f.__code__ assert g.__name__ == 'f' def", "== [1] def test_generator5(self): d = {} exec \"\"\"if 1:", "f() raises(TypeError, g.throw, NameError(\"Error\"), \"error\") def test_throw_fail2(self): def f(): yield", "def f(): try: yield 1 except: x = 3 try:", "\"no active exception to re-raise\" gen = f() next(gen) #", "= f() assert list(g) == [1] def test_generator4(self): def 
f():", "[1, 2, 7] res = f() \"\"\" in d assert", "d['res'] == (10, 7) def test_repr(self): def myFunc(): yield 1", "test_unpackiterable_gen(self): g = (i*i for i in range(-5, 3)) assert", "= (yield 2) except: yield 3 g = f() \"\"\"", "raises(StopIteration, g.next) def test_throw5(self): def f(): try: yield 1 except:", "def test_exception_is_cleared_by_yield(self): def f(): try: foobar except NameError: yield 5", "def f(): yield 1 raise StopIteration g = f() assert", "raises(StopIteration, g.next) raises(NameError, g.throw, NameError) def test_close(self): def f(): yield", "in d g = d['g'] assert g.send(42) == 42 def", "f(): yield 1 assert f().next() == 1 def test_generator2(self): def", "test_throw3(self): def f(): try: yield 1 yield 2 except: yield", "0x\") assert list(g) == [1] assert repr(g) == r def", "list(g) == [1] assert repr(g) == r def test_unpackiterable_gen(self): g", "test_attributes(self): def f(): yield 1 assert g.gi_running g = f()", "yield i me = g() raises(ValueError, me.next) def test_generator_expression(self): exec", "f() assert list(g) == [1] def test_generator4(self): def f(): yield", "tb object\") def test_throw_finishes_generator(self): def f(): yield 1 g =", "not started, must send None def test_generator_explicit_stopiteration(self): def f(): yield", "1 g = f() assert [x for x in g]", "2 except: yield 3 g = f() assert g.next() ==", "f(): yield 1 assert g.gi_running g = f() assert g.gi_code", "for i in [x for x in z]) return total,", "assert g.next() == 1 assert g.next() == 2 assert g.throw(NameError(\"Error\"))", "g.next) def test_throw4(self): d = {} exec \"\"\"if 1: def", "# String exceptions are not allowed anymore raises(TypeError, g.throw, \"Error\")", "NameError, \"Error\", None) def test_throw_fail(self): def f(): yield 1 g", "g.throw, \"Error\") assert g.throw(Exception) == 3 raises(StopIteration, g.throw, Exception) def", "def f(): yield 1 g = f() assert g.next() ==", "try: yield 1 v = (yield 2) except: yield 3", 
"raises(TypeError, g.send, 1) # not started, must send None def", "for x in g] == [1] def test_generator_restart(self): def g():", "assert list(g) == [1] def test_generator4(self): def f(): yield 1", "[x for x in z]) return total, x z =", "raises(TypeError, g.send, 2) raises(TypeError, g.send, 2) def test_should_not_inline(space): from pypy.interpreter.generator", "it won't run on python2.4 d = {} exec \"\"\"", "already executing\" except ValueError: yield 1 x = f() res", "raises(RuntimeError, g.close) def test_close_on_collect(self): ## we need to exec it,", "v g = f() g.next() \"\"\" in d g =", "f() assert g.gi_frame is not None raises(ValueError, g.throw, ValueError) assert", "5 raise # should raise \"no active exception to re-raise\"", "test_close3(self): def f(): try: yield 1 except GeneratorExit: raise NameError", "7] res = f() \"\"\" in d assert d['res'] ==", "g.__name__ == 'f' assert g.gi_frame is not None assert not", "assert list(g) == [1] assert repr(g) == r def test_unpackiterable_gen(self):", "test_generator_propagate_stopiteration(self): def f(): it = iter([1]) while 1: yield it.next()", "f(): yield 1 raise StopIteration assert tuple(f()) == (1,) def", "g(): i = me.next() yield i me = g() raises(ValueError,", "list(x) assert res == [1] def test_throw_on_finished_generator(self): def f(): yield", "def f(): yield 2 g = f() # single argument", "16, 25]) assert set(g) == set() assert set(i for i", "raises(StopIteration, g.throw, Exception) def test_throw6(self): def f(): yield 2 g", "it = iter([1]) while 1: yield it.next() g = f()", "r def test_unpackiterable_gen(self): g = (i*i for i in range(-5,", "1 except: x = 3 try: yield x except: pass", "g = f() g.next() raises(NameError, g.close) def test_close_fail(self): def f():", "z = [1, 2, 7] res = f() \"\"\" in", "yield 1 g = f() assert g.gi_frame is not None", "f() g.next() raises(RuntimeError, g.close) def test_close_on_collect(self): ## we need to", "3 raises(StopIteration, g.next) def test_throw5(self): 
def f(): try: yield 1", "not g.gi_running raises(StopIteration, g.next) assert not g.gi_running assert g.gi_frame is", "exec \"res = sum(i*i for i in range(5))\" assert res", "yield it.next() g = f() assert [x for x in", "except: x = 3 try: yield x except: pass g", "== [1] def test_generator_propagate_stopiteration(self): def f(): it = iter([1]) while", "raise # should raise \"no active exception to re-raise\" gen", "= (i*i for i in range(-5, 3)) assert set(g) ==", "x z = [1, 2, 7] res = f() \"\"\"", "g(x): yield x + 5 yield x + 6 return", "g = f() assert list(g) == [1] def test_generator4(self): def", "r = repr(g) assert r.startswith(\"<generator object myFunc at 0x\") assert", "g.throw, NameError, \"Error\", None) def test_throw_fail(self): def f(): yield 1", "executing\" except ValueError: yield 1 x = f() res =", "test_repr(self): def myFunc(): yield 1 g = myFunc() r =", "raises(NameError, g.throw, NameError(\"Error\")) def test_throw3(self): def f(): try: yield 1", "== 'f' assert g.gi_frame is not None assert not g.gi_running", "f() assert [x for x in g] == [1] def", "try: next(gen) except TypeError: pass def test_multiple_invalid_sends(self): def mygen(): yield", "== 'f' def test_generator3(self): def f(): yield 1 g =", "except GeneratorExit: raise StopIteration g = f() g.next() assert g.close()", "1 except GeneratorExit: raise StopIteration g = f() g.next() assert", "f(): yield 2 g = f() # two arguments version", "not allowed anymore raises(TypeError, g.throw, \"Error\") assert g.throw(Exception) == 3", "mygen(): yield 42 g = mygen() raises(TypeError, g.send, 2) raises(TypeError,", "assert g.next() == 1 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next)", "None, \"not tb object\") def test_throw_finishes_generator(self): def f(): yield 1", "for x in z]) return total, x z = [1,", "g = f() # single argument version raises(NameError, g.throw, NameError(\"Error\"))", "test_generator4(self): def f(): yield 1 g = f() assert [x", "assert 
g.throw(Exception) == 3 raises(StopIteration, g.throw, Exception) def test_throw6(self): def", "g.close) def test_close_fail(self): def f(): try: yield 1 except GeneratorExit:", "## we need to exec it, else it won't run", "res = list(x) assert res == [1] def test_throw_on_finished_generator(self): def", "def g(x): yield x + 5 return g.__code__ ''') assert", "raises(TypeError, g.send) # one argument required raises(TypeError, g.send, 1) #", "x in g] == [1] def test_generator_restart(self): def g(): i", "d['g'] assert g.next() == 1 assert g.next() == 2 assert", "== 3 raises(StopIteration, g.throw, Exception) def test_throw6(self): def f(): yield", "= 42 \"\"\".strip() in d g = d['f']() g.next() del", "\"\"\".strip() in d g = d['f']() g.next() del g import", "def test_throw6(self): def f(): yield 2 g = f() raises(NameError,", "f() raises(NameError, g.throw, NameError, \"Error\", None) def test_throw_fail(self): def f():", "required raises(TypeError, g.send, 1) # not started, must send None", "(yield 2) except: yield 3 g = f() \"\"\" in", "assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw4(self): d =", "space.appexec([], '''(): def g(x): yield x + 5 return g.__code__", "def g(): i = me.next() yield i me = g()", "set(i for i in range(0)) == set() def test_explicit_stop_iteration_unpackiterable(self): def", "d['f']() g.next() del g import gc gc.collect() assert d['f'].x ==", "raises(TypeError, g.throw, list()) def test_throw_fail3(self): def f(): yield 1 g", "f() res = g.next() assert res == 1 raises(StopIteration, g.next)", "StopIteration g = f() g.next() assert g.close() is None def", "def test_generator(self): def f(): yield 1 assert f().next() == 1", "raises(StopIteration, g.next) def test_throw4(self): d = {} exec \"\"\"if 1:", "f(): try: yield 1 yield 2 except: yield 3 g", "raise StopIteration assert tuple(f()) == (1,) def test_exception_is_cleared_by_yield(self): def f():", "def test_throw_on_finished_generator(self): def f(): 
yield 1 g = f() res", "3 g = f() assert g.next() == 1 assert g.throw(NameError(\"Error\"))", "StopIteration assert tuple(f()) == (1,) def test_exception_is_cleared_by_yield(self): def f(): try:", "myFunc(): yield 1 g = myFunc() r = repr(g) assert", "def f(): yield 1 raise StopIteration assert tuple(f()) == (1,)", "f(): yield 2 g = f() # single argument version", "NameError(\"Error\"), None, \"not tb object\") def test_throw_finishes_generator(self): def f(): yield", "test_throw_fail(self): def f(): yield 1 g = f() raises(TypeError, g.throw,", "assert res == 1 raises(StopIteration, g.next) raises(NameError, g.throw, NameError) def", "class AppTestGenerator: def test_generator(self): def f(): yield 1 assert f().next()", "1 assert g.next() == 2 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration,", "tuple(f()) == (1,) def test_exception_is_cleared_by_yield(self): def f(): try: foobar except", "== 2 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration, g.next) def test_throw5(self):", "--> 5 try: next(gen) except TypeError: pass def test_multiple_invalid_sends(self): def", "1 g = f() assert g.close() is None def test_close2(self):", "2) raises(TypeError, g.send, 2) def test_should_not_inline(space): from pypy.interpreter.generator import should_not_inline", "= f() res = list(x) assert res == [1] def", "(yield ) yield v g = f() g.next() \"\"\" in", "x except: pass g = f() g.next() # String exceptions", "test_throw_fail2(self): def f(): yield 1 g = f() raises(TypeError, g.throw,", "42 \"\"\".strip() in d g = d['f']() g.next() del g", "False w_co = space.appexec([], '''(): def g(x): yield x +", "pass g = f() g.next() # String exceptions are not", "'f' def test_generator3(self): def f(): yield 1 g = f()", "ValueError) assert g.gi_frame is None def test_throw_bug(self): def f(): try:", "v = (yield ) yield v g = f() g.next()", "1 g = f() raises(TypeError, g.throw, NameError(\"Error\"), None, \"not tb", "arguments version raises(NameError, g.throw, 
NameError, \"Error\") def test_throw2(self): def f():", "in d g = d['f']() g.next() del g import gc", "1, 4, 9, 16, 25]) assert set(g) == set() assert", "def f(): yield 1 assert g.gi_running g = f() assert", "raise \"no active exception to re-raise\" gen = f() next(gen)", "started, must send None def test_generator_explicit_stopiteration(self): def f(): yield 1", "def test_generator4(self): def f(): yield 1 g = f() assert", "f(): try: foobar except NameError: yield 5 raise # should", "None def test_close3(self): def f(): try: yield 1 except GeneratorExit:", "except ValueError: yield 1 x = f() res = list(x)", "from pypy.interpreter.generator import should_not_inline w_co = space.appexec([], '''(): def g(x):", "= g.next() assert res == 1 raises(StopIteration, g.next) raises(NameError, g.throw,", "def test_multiple_invalid_sends(self): def mygen(): yield 42 g = mygen() raises(TypeError,", "test_throw2(self): def f(): yield 2 g = f() # single", "raises(StopIteration, g.next) def test_attributes(self): def f(): yield 1 assert g.gi_running", "f() assert g.next() == 1 assert g.throw(NameError(\"Error\")) == 3 raises(StopIteration,", "pass def test_multiple_invalid_sends(self): def mygen(): yield 42 g = mygen()", "g = f() # two arguments version raises(NameError, g.throw, NameError,", "== 1 raises(StopIteration, g.next) def test_attributes(self): def f(): yield 1", "version raises(NameError, g.throw, NameError(\"Error\")) def test_throw3(self): def f(): try: yield", "1: def f(): v = (yield ) yield v g", "f(): yield 1 g = f() raises(TypeError, g.throw, NameError(\"Error\"), \"error\")", "def f(): try: x.throw(IndexError) # => \"generator already executing\" except", "= iter([1]) while 1: yield it.next() g = f() assert", "# two arguments version raises(NameError, g.throw, NameError, \"Error\") def test_throw2(self):", "to re-raise\" gen = f() next(gen) # --> 5 try:", "assert res == [1] def test_throw_on_finished_generator(self): def f(): yield 1", "i in range(-5, 3)) assert 
set(g) == set([0, 1, 4,", "active exception to re-raise\" gen = f() next(gen) # -->", "in range(-5, 3)) assert set(g) == set([0, 1, 4, 9,", "assert not g.gi_running g.next() assert not g.gi_running raises(StopIteration, g.next) assert", "def test_throw_fail(self): def f(): yield 1 g = f() raises(TypeError,", "= d['f']() g.next() del g import gc gc.collect() assert d['f'].x", "+ 5 yield x + 6 return g.__code__ ''') assert", "3 raises(StopIteration, g.next) def test_throw4(self): d = {} exec \"\"\"if", "yield x except: pass g = f() g.next() # String", "g.gi_frame is None assert g.gi_code is f.__code__ assert g.__name__ ==", "\"error\") def test_throw_fail2(self): def f(): yield 1 g = f()", "def f(): try: yield 1 except GeneratorExit: yield 2 g", "x in g] == [1] def test_generator_propagate_stopiteration(self): def f(): it", "{} exec \"\"\" def f(): total = sum(i for i", "== False w_co = space.appexec([], '''(): def g(x): yield x", "[1] def test_throw_on_finished_generator(self): def f(): yield 1 g = f()", "g.throw, NameError) def test_close(self): def f(): yield 1 g =", "f() res = list(x) assert res == [1] def test_throw_on_finished_generator(self):", "None) def test_throw_fail(self): def f(): yield 1 g = f()", "g.gi_running raises(StopIteration, g.next) assert not g.gi_running assert g.gi_frame is None", "= myFunc() r = repr(g) assert r.startswith(\"<generator object myFunc at", "f() \"\"\" in d assert d['res'] == (10, 7) def", "def test_close(self): def f(): yield 1 g = f() assert", "to exec it, else it won't run on python2.4 d", "def test_should_not_inline(space): from pypy.interpreter.generator import should_not_inline w_co = space.appexec([], '''():", "{} exec \"\"\" def f(): try: yield finally: f.x =", "1 g = f() assert g.next() == 1 raises(StopIteration, g.next)", "raises(TypeError, g.throw, NameError(\"Error\"), \"error\") def test_throw_fail2(self): def f(): yield 1", "g.gi_code is f.__code__ assert g.__name__ == 'f' assert g.gi_frame is", "f(): yield 2 g = 
f() raises(NameError, g.throw, NameError, \"Error\",", "range(-5, 3)) assert set(g) == set([0, 1, 4, 9, 16,", "set(g) == set() assert set(i for i in range(0)) ==", "= f() g.next() assert g.close() is None def test_close3(self): def", "gen = f() next(gen) # --> 5 try: next(gen) except", "f() next(gen) # --> 5 try: next(gen) except TypeError: pass", "= d['g'] assert g.send(42) == 42 def test_throw1(self): def f():", "range(0)) == set() def test_explicit_stop_iteration_unpackiterable(self): def f(): yield 1 raise", "f.__code__ assert g.__name__ == 'f' assert g.gi_frame is not None", "{} exec \"\"\"if 1: def f(): try: yield 1 v", "x + 6 return g.__code__ ''') assert should_not_inline(w_co) == True", "assert g.gi_code is f.__code__ assert g.__name__ == 'f' def test_generator3(self):", "yield 2 g = f() # single argument version raises(NameError,", "f() raises(TypeError, g.throw, NameError(\"Error\"), None, \"not tb object\") def test_throw_finishes_generator(self):", "total, x z = [1, 2, 7] res = f()", "= space.appexec([], '''(): def g(x): yield x + 5 yield", "i in range(0)) == set() def test_explicit_stop_iteration_unpackiterable(self): def f(): yield", "1: def f(): try: yield 1 v = (yield 2)", "def f(): yield 1 g = f() res = g.next()", "1 raise StopIteration g = f() assert [x for x", "try: foobar except NameError: yield 5 raise # should raise", "g.send(42) == 42 def test_throw1(self): def f(): yield 2 g", "on python2.4 d = {} exec \"\"\" def f(): try:", "assert set(i for i in range(0)) == set() def test_explicit_stop_iteration_unpackiterable(self):", "NameError, \"Error\") def test_throw2(self): def f(): yield 2 g =", "f(): try: yield 1 except: x = 3 try: yield", "for i in range(-5, 3)) assert set(g) == set([0, 1,", "g] == [1] def test_generator5(self): d = {} exec \"\"\"if", "assert g.send(42) == 42 def test_throw1(self): def f(): yield 2", "exec \"\"\" def f(): total = sum(i for i in", "def test_unpackiterable_gen(self): g = (i*i for i in range(-5, 3))", "exceptions 
are not allowed anymore raises(TypeError, g.throw, \"Error\") assert g.throw(Exception)", "30 def test_generator_expression_2(self): d = {} exec \"\"\" def f():", "def test_throw_bug(self): def f(): try: x.throw(IndexError) # => \"generator already", "# not started, must send None def test_generator_explicit_stopiteration(self): def f():", "=> \"generator already executing\" except ValueError: yield 1 x =", "[1] assert repr(g) == r def test_unpackiterable_gen(self): g = (i*i", "assert g.gi_running g = f() assert g.gi_code is f.__code__ assert", "x in z]) return total, x z = [1, 2,", "try: yield 1 except GeneratorExit: raise StopIteration g = f()", "d assert d['res'] == (10, 7) def test_repr(self): def myFunc():", "res = g.next() assert res == 1 raises(StopIteration, g.next) raises(NameError,", "GeneratorExit: raise NameError g = f() g.next() raises(NameError, g.close) def", "res == 30 def test_generator_expression_2(self): d = {} exec \"\"\"", "= {} exec \"\"\" def f(): try: yield finally: f.x", "= f() raises(TypeError, g.send) # one argument required raises(TypeError, g.send,", "== 42 def test_generator_raises_typeerror(self): def f(): yield 1 g =", "5 try: next(gen) except TypeError: pass def test_multiple_invalid_sends(self): def mygen():", "\"\"\" def f(): total = sum(i for i in [x", "me = g() raises(ValueError, me.next) def test_generator_expression(self): exec \"res =", "Exception) def test_throw6(self): def f(): yield 2 g = f()", "not g.gi_running g.next() assert not g.gi_running raises(StopIteration, g.next) assert not", "gc.collect() assert d['f'].x == 42 def test_generator_raises_typeerror(self): def f(): yield", "f(): it = iter([1]) while 1: yield it.next() g =", "assert should_not_inline(w_co) == False w_co = space.appexec([], '''(): def g(x):", "raises(NameError, g.throw, NameError) def test_close(self): def f(): yield 1 g", "= f() raises(NameError, g.throw, NameError, \"Error\", None) def test_throw_fail(self): def", "python2.4 d = {} exec \"\"\" def 
f(): try: yield", "yield 1 g = f() raises(TypeError, g.throw, NameError(\"Error\"), None, \"not", "None def test_throw_bug(self): def f(): try: x.throw(IndexError) # => \"generator", "= f() # single argument version raises(NameError, g.throw, NameError(\"Error\")) def", "myFunc() r = repr(g) assert r.startswith(\"<generator object myFunc at 0x\")", "d = {} exec \"\"\" def f(): total = sum(i", "'f' assert g.gi_frame is not None assert not g.gi_running g.next()", "is None def test_close2(self): def f(): try: yield 1 except", "g = f() g.next() raises(RuntimeError, g.close) def test_close_on_collect(self): ## we", "exec \"\"\"if 1: def f(): v = (yield ) yield", "def test_attributes(self): def f(): yield 1 assert g.gi_running g =", "next(gen) except TypeError: pass def test_multiple_invalid_sends(self): def mygen(): yield 42", "test_throw_on_finished_generator(self): def f(): yield 1 g = f() res =", "# one argument required raises(TypeError, g.send, 1) # not started,", "test_generator_expression(self): exec \"res = sum(i*i for i in range(5))\" assert", "yield x + 6 return g.__code__ ''') assert should_not_inline(w_co) ==", "try: yield 1 except GeneratorExit: yield 2 g = f()", "except: pass g = f() g.next() # String exceptions are", "test_exception_is_cleared_by_yield(self): def f(): try: foobar except NameError: yield 5 raise", "one argument required raises(TypeError, g.send, 1) # not started, must", "exec \"\"\"if 1: def f(): try: yield 1 v =", "test_throw6(self): def f(): yield 2 g = f() raises(NameError, g.throw,", "g.throw, NameError(\"Error\")) def test_throw3(self): def f(): try: yield 1 yield", "in g] == [1] def test_generator_propagate_stopiteration(self): def f(): it =", "exec \"\"\" def f(): try: yield finally: f.x = 42", "r.startswith(\"<generator object myFunc at 0x\") assert list(g) == [1] assert", "try: yield 1 yield 2 except: yield 3 g =", "NameError g = f() g.next() raises(NameError, g.close) def test_close_fail(self): def", "def f(): yield 2 g = f() 
raises(NameError, g.throw, NameError,", "assert g.__name__ == 'f' assert g.gi_frame is not None assert", "\"\"\" def f(): try: yield finally: f.x = 42 \"\"\".strip()", "= f() \"\"\" in d assert d['res'] == (10, 7)", "assert f().next() == 1 def test_generator2(self): def f(): yield 1", "assert not g.gi_running assert g.gi_frame is None assert g.gi_code is", "def f(): yield 1 g = f() assert g.close() is", "g.next() assert g.close() is None def test_close3(self): def f(): try:", "in range(0)) == set() def test_explicit_stop_iteration_unpackiterable(self): def f(): yield 1", "f() assert g.close() is None def test_close2(self): def f(): try:", "i = me.next() yield i me = g() raises(ValueError, me.next)", "f(): try: yield 1 except GeneratorExit: yield 2 g =", "f(): yield 1 g = f() assert g.gi_frame is not", "1 g = f() assert g.gi_frame is not None raises(ValueError,", "raises(TypeError, g.throw, NameError(\"Error\"), None, \"not tb object\") def test_throw_finishes_generator(self): def", "== 30 def test_generator_expression_2(self): d = {} exec \"\"\" def", "f() raises(TypeError, g.throw, list()) def test_throw_fail3(self): def f(): yield 1", "g.gi_frame is None def test_throw_bug(self): def f(): try: x.throw(IndexError) #", "1 raises(StopIteration, g.next) def test_attributes(self): def f(): yield 1 assert", "is None def test_close3(self): def f(): try: yield 1 except", "NameError(\"Error\")) def test_throw3(self): def f(): try: yield 1 yield 2", "g = f() raises(TypeError, g.send) # one argument required raises(TypeError,", "== [1] assert repr(g) == r def test_unpackiterable_gen(self): g =", "1 v = (yield 2) except: yield 3 g =", "yield 1 g = f() assert list(g) == [1] def", "raise StopIteration g = f() assert [x for x in", "# single argument version raises(NameError, g.throw, NameError(\"Error\")) def test_throw3(self): def", "g() raises(ValueError, me.next) def test_generator_expression(self): exec \"res = sum(i*i for", "g.next() assert res == 1 raises(StopIteration, 
g.next) raises(NameError, g.throw, NameError)", "'''(): def g(x): yield x + 5 yield x +", "1 g = f() raises(TypeError, g.throw, NameError(\"Error\"), \"error\") def test_throw_fail2(self):", "def f(): yield 1 assert f().next() == 1 def test_generator2(self):", "def test_throw_fail2(self): def f(): yield 1 g = f() raises(TypeError,", "+ 5 return g.__code__ ''') assert should_not_inline(w_co) == False w_co", "2, 7] res = f() \"\"\" in d assert d['res']", "mygen() raises(TypeError, g.send, 2) raises(TypeError, g.send, 2) def test_should_not_inline(space): from", "g.send, 2) raises(TypeError, g.send, 2) def test_should_not_inline(space): from pypy.interpreter.generator import", "yield 1 raise StopIteration assert tuple(f()) == (1,) def test_exception_is_cleared_by_yield(self):", "\"Error\", None) def test_throw_fail(self): def f(): yield 1 g =", "def f(): try: yield 1 except GeneratorExit: raise StopIteration g", "v = (yield 2) except: yield 3 g = f()", "return g.__code__ ''') assert should_not_inline(w_co) == False w_co = space.appexec([],", "g.next() assert not g.gi_running raises(StopIteration, g.next) assert not g.gi_running assert", "yield 2 g = f() # two arguments version raises(NameError,", "except: yield 3 g = f() \"\"\" in d g", "= f() g.next() # String exceptions are not allowed anymore", "assert g.gi_frame is None assert g.gi_code is f.__code__ assert g.__name__", "def test_throw_finishes_generator(self): def f(): yield 1 g = f() assert", "(i*i for i in range(-5, 3)) assert set(g) == set([0,", "\"Error\") assert g.throw(Exception) == 3 raises(StopIteration, g.throw, Exception) def test_throw6(self):", "# --> 5 try: next(gen) except TypeError: pass def test_multiple_invalid_sends(self):", "yield 2 except: yield 3 g = f() assert g.next()", "yield 1 g = f() raises(TypeError, g.send) # one argument", "f(): yield 1 g = f() assert list(g) == [1]", "w_co = space.appexec([], '''(): def g(x): yield x + 5", "must send None def 
test_generator_explicit_stopiteration(self): def f(): yield 1 raise", "g = f() g.next() \"\"\" in d g = d['g']", "1 g = f() assert list(g) == [1] def test_generator4(self):", "yield 3 g = f() assert g.next() == 1 assert", "g import gc gc.collect() assert d['f'].x == 42 def test_generator_raises_typeerror(self):", "1 g = f() raises(TypeError, g.throw, list()) def test_throw_fail3(self): def", "f.x = 42 \"\"\".strip() in d g = d['f']() g.next()", "== [1] def test_generator_restart(self): def g(): i = me.next() yield", "def f(): yield 1 g = f() assert g.gi_frame is", "# => \"generator already executing\" except ValueError: yield 1 x", "yield 1 x = f() res = list(x) assert res", "1 g = myFunc() r = repr(g) assert r.startswith(\"<generator object", "StopIteration g = f() assert [x for x in g]", "1 x = f() res = list(x) assert res ==", "= f() assert g.gi_code is f.__code__ assert g.__name__ == 'f'", "= f() next(gen) # --> 5 try: next(gen) except TypeError:", "else it won't run on python2.4 d = {} exec", "assert [x for x in g] == [1] def test_generator_propagate_stopiteration(self):", "def f(): total = sum(i for i in [x for", "def test_generator5(self): d = {} exec \"\"\"if 1: def f():", "'''(): def g(x): yield x + 5 return g.__code__ ''')", "yield x + 5 yield x + 6 return g.__code__", "f().next() == 1 def test_generator2(self): def f(): yield 1 g", "range(5))\" assert res == 30 def test_generator_expression_2(self): d = {}", "def test_throw4(self): d = {} exec \"\"\"if 1: def f():", "yield 1 g = f() assert g.close() is None def", "42 def test_generator_raises_typeerror(self): def f(): yield 1 g = f()", "set([0, 1, 4, 9, 16, 25]) assert set(g) == set()", "d['g'] assert g.send(42) == 42 def test_throw1(self): def f(): yield", "g.gi_code is f.__code__ assert g.__name__ == 'f' def test_generator3(self): def", "def test_throw2(self): def f(): yield 2 g = f() #", "two arguments version raises(NameError, g.throw, NameError, \"Error\") def test_throw2(self): def", "foobar except 
NameError: yield 5 raise # should raise \"no", "yield 1 except GeneratorExit: yield 2 g = f() g.next()", "def test_generator_expression(self): exec \"res = sum(i*i for i in range(5))\"", "try: yield 1 except GeneratorExit: raise NameError g = f()", "None def test_close2(self): def f(): try: yield 1 except GeneratorExit:", "= f() assert g.close() is None def test_close2(self): def f():", "1 assert g.gi_running g = f() assert g.gi_code is f.__code__", "1 assert f().next() == 1 def test_generator2(self): def f(): yield", "yield 1 assert f().next() == 1 def test_generator2(self): def f():", "def test_close3(self): def f(): try: yield 1 except GeneratorExit: raise", "run on python2.4 d = {} exec \"\"\" def f():", "g = myFunc() r = repr(g) assert r.startswith(\"<generator object myFunc", "= f() # two arguments version raises(NameError, g.throw, NameError, \"Error\")", "exception to re-raise\" gen = f() next(gen) # --> 5", "= mygen() raises(TypeError, g.send, 2) raises(TypeError, g.send, 2) def test_should_not_inline(space):", "def test_close2(self): def f(): try: yield 1 except GeneratorExit: raise", "def test_generator_expression_2(self): d = {} exec \"\"\" def f(): total", "yield 1 g = f() res = g.next() assert res", "we need to exec it, else it won't run on", "= repr(g) assert r.startswith(\"<generator object myFunc at 0x\") assert list(g)", "test_explicit_stop_iteration_unpackiterable(self): def f(): yield 1 raise StopIteration assert tuple(f()) ==", "f() \"\"\" in d g = d['g'] assert g.next() ==", "1 raise StopIteration assert tuple(f()) == (1,) def test_exception_is_cleared_by_yield(self): def", "1 g = f() res = g.next() assert res ==", "== 42 def test_throw1(self): def f(): yield 2 g =", "1 raises(StopIteration, g.next) raises(NameError, g.throw, NameError) def test_close(self): def f():", "while 1: yield it.next() g = f() assert [x for", "def test_generator_restart(self): def g(): i = me.next() yield i me", "1 except GeneratorExit: yield 2 g = f() g.next() 
raises(RuntimeError,", "try: x.throw(IndexError) # => \"generator already executing\" except ValueError: yield", "test_close_on_collect(self): ## we need to exec it, else it won't", "myFunc at 0x\") assert list(g) == [1] assert repr(g) ==", "yield v g = f() g.next() \"\"\" in d g", "for x in g] == [1] def test_generator_propagate_stopiteration(self): def f():", "\"Error\") def test_throw2(self): def f(): yield 2 g = f()", "res == [1] def test_throw_on_finished_generator(self): def f(): yield 1 g", "== (10, 7) def test_repr(self): def myFunc(): yield 1 g", "(1,) def test_exception_is_cleared_by_yield(self): def f(): try: foobar except NameError: yield", "g = f() assert g.close() is None def test_close2(self): def", "g.throw, NameError, \"Error\") def test_throw2(self): def f(): yield 2 g", "in range(5))\" assert res == 30 def test_generator_expression_2(self): d =", "test_multiple_invalid_sends(self): def mygen(): yield 42 g = mygen() raises(TypeError, g.send,", "g = mygen() raises(TypeError, g.send, 2) raises(TypeError, g.send, 2) def", "g.next() del g import gc gc.collect() assert d['f'].x == 42", "= space.appexec([], '''(): def g(x): yield x + 5 return", "9, 16, 25]) assert set(g) == set() assert set(i for", "try: yield 1 except: x = 3 try: yield x", "x.throw(IndexError) # => \"generator already executing\" except ValueError: yield 1", "x in g] == [1] def test_generator5(self): d = {}", "raises(StopIteration, g.next) assert not g.gi_running assert g.gi_frame is None assert", "test_generator_expression_2(self): d = {} exec \"\"\" def f(): total =", "def f(): yield 2 g = f() # two arguments", "yield 3 g = f() \"\"\" in d g =", "assert g.gi_frame is not None assert not g.gi_running g.next() assert", "g = f() assert [x for x in g] ==", "= list(x) assert res == [1] def test_throw_on_finished_generator(self): def f():", "is not None assert not g.gi_running g.next() assert not g.gi_running", "[x for x in g] == [1] def test_generator5(self): d", "f() raises(TypeError, 
g.send) # one argument required raises(TypeError, g.send, 1)", "pypy.interpreter.generator import should_not_inline w_co = space.appexec([], '''(): def g(x): yield", "f(): total = sum(i for i in [x for x", "def test_explicit_stop_iteration_unpackiterable(self): def f(): yield 1 raise StopIteration assert tuple(f())", "2 g = f() # single argument version raises(NameError, g.throw,", "2) except: yield 3 g = f() \"\"\" in d", "in g] == [1] def test_generator_restart(self): def g(): i =", "test_throw5(self): def f(): try: yield 1 except: x = 3", "None def test_generator_explicit_stopiteration(self): def f(): yield 1 raise StopIteration g", "assert set(g) == set() assert set(i for i in range(0))", "f(): try: yield finally: f.x = 42 \"\"\".strip() in d", "3 raises(StopIteration, g.throw, Exception) def test_throw6(self): def f(): yield 2" ]
[ "with the same environment of VM' ) subparser.add_argument( '--disk-size', dest='disk_size',", "use latest hypervisor settings', ) subparser = subparsers.add_parser( 'delete', description=vm_delete.__doc__,", "any(isinstance(a, _SubParsersAction) for a in self._actions): return super(IGVMArgumentParser, self).format_help() out", "summing up the silent and verbose arguments in here. It", "in vars(ColorFormatters): msg = getattr(ColorFormatters, level).format(msg) return msg def parse_args():", "to be called after every # use. We are also", "action='store_true', help='Allow migrating to a host which has the state", ") subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "the guest system', ) subparser = subparsers.add_parser( 'rename', description=vm_rename.__doc__, )", "debug mode', ) subparser.add_argument( '--offline', action='store_true', help='Force offline migration', )", "subparsers.add_parser( 'define', description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of the guest", "help='Hostname of the guest system', ) subparser.add_argument( 'size', help=( 'New", "subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser", "Host which has the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd',", "StreamHandler to format messages short-cutting Formatters\"\"\" def __init__(self, *args, **kwargs):", "help='Run puppet in debug mode', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true',", "and print help for choice, subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if", "subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname of the", "might 
be really wrong. Run igvm ' 'with --verbose to", "help=( 'New IPv4 address of VM' ) ) subparser.add_argument( '--offline',", "= subparsers.add_parser( 'rename', description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname of", "subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "[1] for the levels. # Paramiko is overly verbose. We", "We configure it for one level higher. # # [1]", "return super(IGVMArgumentParser, self).format_help() out = [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions", "will probably only be one subparser_action, but better safe #", "class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to format messages short-cutting Formatters\"\"\" def", "action='store_true', help='Run puppet in debug mode', ) subparser.add_argument( '--offline', action='store_true',", "the domain to use latest hypervisor settings', ) subparser =", "# destruction right after the disconnect function is called. We", ") subparser = subparsers.add_parser( 'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname',", "mode', ) subparser.add_argument( '--offline', action='store_true', help='Force offline migration', ) subparser.add_argument(", "them both, but giving an error is not # better.", "build it if not defined', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true',", "on a Host which has the state online_reserved', ) subparser.add_argument(", "description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "igvm fails to find ' 'a matching Hypervisor something might", "all preferences so that Hypervisors are not excluded. 
' 'Use", "right after the disconnect function is called. We are #", "dest='run_puppet', help='Skip running puppet in chroot before powering up', )", "IGVMArgumentParser(ArgumentParser): def format_help(self): if not any(isinstance(a, _SubParsersAction) for a in", "help='Hostname of the guest system', ) subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None,", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--retire',", "return '\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to format messages short-cutting", "print_function from argparse import ArgumentParser, _SubParsersAction from logging import StreamHandler,", "not defined', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences", ") subparser.add_argument( '--migrate', action='store_true', help='Migrate VM to new HV while", "of the guest system', ) subparser.add_argument( 'count', type=int, help='New number", "import disconnect_all from igvm.commands import ( change_address, disk_set, evacuate, host_info,", "'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "logging library documentation [1] for the levels. # Paramiko is", "'--retire', action='store_true', help='Retire VM after stopping it', ) subparser =", "Hypervisor something might be really wrong. Run igvm ' 'with", "or migrate VM only to a HV with the same", "' operator to shut down VM for 24h.' 
), )", "help='Hostname of the guest system', ) subparser.add_argument( 'count', type=int, help='New", "'{}: {}: {}'.format(level, record.name, record.getMessage()) if self.isatty and level in", "guest system', ) subparser.add_argument( 'new_address', help=( 'New IPv4 address of", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser =", ") subparser = subparsers.add_parser( 'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname',", "help='New hostname', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, if running',", "of the guest system') return vars(top_parser.parse_args()) def main(): args =", "**kwargs) self.isatty = self.stream.isatty() def format(self, record): level = record.levelname", "verbose. We configure it for one level higher. # #", "vm_start, vm_stop, vm_sync, vm_define, ) from igvm.libvirt import close_virtconns class", "IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count', default=0) top_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers", "'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "subparser.add_argument( '--postboot', metavar='postboot_script', help='Run postboot_script on the guest after first", "{}'.format(level, record.name, record.getMessage()) if self.isatty and level in vars(ColorFormatters): msg", "of the guest system', ) subparser.add_argument( 'new_address', help=( 'New IPv4", "both, but giving an error is not # better. 
See", "(default) or netcat transport to migrate disk image' ), )", ")) out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to format", "'--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow building on a Host which has", "system', ) subparser = subparsers.add_parser( 'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument(", "subparser.add_argument( '--retire', action='store_true', help='Retire VM after stopping it', ) subparser", ") subparser.add_argument( '--offline', action='store_true', help='Force offline migration', ) subparser.add_argument( '--ignore-reserved',", "hypervisor settings', ) subparser = subparsers.add_parser( 'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete)", "self.isatty = self.stream.isatty() def format(self, record): level = record.levelname msg", "IP address change offline', ) subparser.add_argument( '--migrate', action='store_true', help='Migrate VM", "of the guest system', ) subparser.add_argument( '--retire', action='store_true', help='Set VM", "the guest system', ) subparser.add_argument( '--force', action='store_true', help='Do not wait", "library of Fabric, Paramiko, raises an error, on # destruction", "subparser = subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname", ") subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migrating to a host", "VM during offline migration, igvm will wait for' ' operator", "return vars(top_parser.parse_args()) def main(): args = parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try:", "action='store_true', help='Perform IP address change offline', ) subparser.add_argument( '--migrate', action='store_true',", "subparser.add_argument( 
'--offline-transport', default='drbd', help=( 'Specify drbd (default) or netcat transport", "help='Overrules all preferences so that Hypervisors are not excluded. '", "sleeping for a little while to avoid this. time.sleep(0.1) def", "https://docs.python.org/library/logging.html#logging-levels level = 20 + (silent - verbose) * 10", "disk of migrated VM. Expects new size in GiB. '", "subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None, help='Hostname of destination hypervisor', ) subparser.add_argument(", "excluded. ' 'Use this if igvm fails to find a", "record.name, record.getMessage()) if self.isatty and level in vars(ColorFormatters): msg =", "command line interface Copyright (c) 2017 InnoGames GmbH \"\"\" from", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'count',", "unit (default GiB). ' 'Can be specified relative with \"+\".", "Hypervisor.', ) subparser = subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument(", "subparser.add_argument( 'size', help=( 'New memory size with optional unit (default", "and verbose arguments in here. It # is not really", "out.append('\\n'.join( '\\t{}'.format(l.strip()) for l in subparser .get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage())) return", ") subparser.add_argument( '--postboot', metavar='postboot_script', help='Run postboot_script on the guest after", ") subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname', help='Hostname of the hypervisor', ) subparser.add_argument(", "root_logger.addHandler(IGVMLogHandler()) # We are summing up the silent and verbose", "here. 
It # is not really meaningful to use them", "image' ), ) subparser = subparsers.add_parser( 'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set)", "dest='allow_reserved_hv', action='store_true', help='Allow migrating to a host which has the", "'size', help=( 'New disk size with an optional unit (default", "postboot_script on the guest after first boot', ) subparser.add_argument( '--skip-puppet',", "level higher. # # [1] https://docs.python.org/library/logging.html#logging-levels level = 20 +", "disconnect function is called. We are # sleeping for a", "= '\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def format_help(self): if not any(isinstance(a, _SubParsersAction)", "and level in vars(ColorFormatters): msg = getattr(ColorFormatters, level).format(msg) return msg", "help=( 'New disk size with an optional unit (default GiB).", "\"\"\" from __future__ import print_function from argparse import ArgumentParser, _SubParsersAction", "to new HV while changing IP address', ) subparser.add_argument( '--ignore-reserved',", "size with optional unit (default is MiB).' 
'Only integers are", "'\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def format_help(self): if not", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--force', action='store_true',", "subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "this if igvm fails to find a matching Hypervisor, but", "not any(isinstance(a, _SubParsersAction) for a in self._actions): return super(IGVMArgumentParser, self).format_help()", "shutdown VM during offline migration, igvm will wait for' '", "or netcat transport to migrate disk image' ), ) subparser", "drbd (default) or netcat transport to migrate disk image' ),", "help='New number of CPUs', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM,", "to shutdown gracefully', ) subparser.add_argument( '--no-redefine', action='store_true', help='Do not redefine", "guest system', ) subparser = subparsers.add_parser( 'rename', description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename)", "format_help(self): if not any(isinstance(a, _SubParsersAction) for a in self._actions): return", "Serveradmin instead of deleting', ) subparser = subparsers.add_parser( 'info', description=host_info.__doc__,", "destruction right after the disconnect function is called. We are", "# Paramiko is overly verbose. We configure it for one", "Host which has the state online_reserved', ) subparser.add_argument( '--rebuild', dest='rebuild',", "the guest system') return vars(top_parser.parse_args()) def main(): args = parse_args()", "to migrate disk image' ), ) subparser = subparsers.add_parser( 'disk-set',", "function is called. 
We are # sleeping for a little", "subparsers_actions: # Get all subparsers and print help for choice,", "action='count', default=0) top_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers = top_parser.add_subparsers(help='Actions') subparser", "one level higher. # # [1] https://docs.python.org/library/logging.html#logging-levels level = 20", "[1] https://docs.python.org/library/logging.html#logging-levels level = 20 + (silent - verbose) *", "action='count', default=0) subparsers = top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser( 'build', description=vm_build.__doc__,", "__future__ import print_function from argparse import ArgumentParser, _SubParsersAction from logging", "vm_stop, vm_sync, vm_define, ) from igvm.libvirt import close_virtconns class ColorFormatters():", "optional unit (default GiB). ' 'Can be specified relative with", "a VM, set it to given state, maintenance by default',", "commands:\\n') subparsers_actions = [ action for action in self._actions if", "find a matching Hypervisor, but you ' 'are in urgent", ") subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow building on a Host", "matching Hypervisor, but you ' 'are in urgent need to", "help='Run puppet in chroot before powering up', ) subparser.add_argument( '--debug-puppet',", "'--dry-run', action='store_true', help='Do not migrate but just print what would", "change_address, disk_set, evacuate, host_info, mem_set, vcpu_set, vm_build, vm_delete, vm_migrate, vm_rename,", "every # use. 
We are also taking our chance to", "'--offline', action='store_true', help='Perform IP address change offline', ) subparser.add_argument( '--migrate',", "'xfs'), help=( 'Specify drbd (default), netcat or xfs transport to", "'--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build or migrate VM only to a", "\"\"\"igvm - The command line interface Copyright (c) 2017 InnoGames", "raises an error, on # destruction right after the disconnect", "subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "# the hypervisors. disconnect_all() close_virtconns() # The underlying library of", "for one level higher. # # [1] https://docs.python.org/library/logging.html#logging-levels level =", "(default), netcat or xfs transport to migrate ' 'disk image'", "finding a Hypervisor.', ) subparser = subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, )", "' 'Works only with --offline --offline-transport=xfs', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences',", "ArgumentParser, _SubParsersAction from logging import StreamHandler, root as root_logger import", "destination hypervisor', ) subparser.add_argument( '--run-puppet', action='store_true', help='Run puppet in chroot", "address change offline', ) subparser.add_argument( '--migrate', action='store_true', help='Migrate VM to", ") subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change memory, and restart", "of the guest system', ) subparser.add_argument( '--unretire', nargs='?', const='maintenance', help='Unretire", "= parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args) finally: # Fabric requires", ") subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "a Hypervisor.', ) subparser = subparsers.add_parser( 'define', description=vm_define.__doc__, ) 
subparser.set_defaults(func=vm_define)", "root as root_logger import time from fabric.network import disconnect_all from", "to use them both, but giving an error is not", "if not defined', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all", ") subparser.add_argument( '--force', action='store_true', help='Do not wait for guest to", "subparser.add_argument( '--no-redefine', action='store_true', help='Do not redefine the domain to use", ") subparser.add_argument( '--offline', action='store_true', help='Perform IP address change offline', )", "help='Hostname of destination hypervisor', ) subparser.add_argument( '--dry-run', action='store_true', help='Do not", "use. We are also taking our chance to disconnect from", "'Don\\'t shutdown VM during offline migration, igvm will wait for'", "underlying library of Fabric, Paramiko, raises an error, on #", "latest hypervisor settings', ) subparser = subparsers.add_parser( 'delete', description=vm_delete.__doc__, )", "of Fabric, Paramiko, raises an error, on # destruction right", ") subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "action for action in self._actions if isinstance(action, _SubParsersAction) ] #", "help='Hostname of the guest system', ) subparser.add_argument( '--force', action='store_true', help='Do", "action='store_true', help='Migrate VM to new HV while changing IP address',", "but giving an error is not # better. 
See Python", "fails finding a Hypervisor.', ) subparser = subparsers.add_parser( 'define', description=vm_define.__doc__,", "), ) subparser.add_argument( '--no-shutdown', action='store_true', help=( 'Don\\'t shutdown VM during", "Hypervisor.', ) subparser = subparsers.add_parser( 'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument(", "subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migrating to a host which", "maintenance by default', ) subparser = subparsers.add_parser( 'stop', description=vm_stop.__doc__, )", "on # destruction right after the disconnect function is called.", ") subparser.add_argument( 'count', type=int, help='New number of CPUs', ) subparser.add_argument(", "in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for l in", "offline', ) subparser.add_argument( '--migrate', action='store_true', help='Migrate VM to new HV", ") subparser.add_argument( '--dry-run', action='store_true', help='Do not migrate but just print", "= getattr(ColorFormatters, level).format(msg) return msg def parse_args(): top_parser = IGVMArgumentParser('igvm')", "MiB).' 'Only integers are allowed.' ), ) subparser.add_argument( '--offline', action='store_true',", "# than sorry. for subparsers_action in subparsers_actions: # Get all", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'size',", "what would be done' ) subparser.add_argument( '--offline', nargs='*', help='Migrate VMs", "hypervisors. 
disconnect_all() close_virtconns() # The underlying library of Fabric, Paramiko,", "are also taking our chance to disconnect from # the", "the same environment of VM' ) subparser.add_argument( '--disk-size', dest='disk_size', type=int,", "system', ) subparser = subparsers.add_parser( 'rename', description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename) subparser.add_argument(", "'--debug-puppet', action='store_true', help='Run puppet in debug mode', ) subparser.add_argument( '--ignore-reserved',", "IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to format messages short-cutting Formatters\"\"\" def __init__(self,", "help=( 'New memory size with optional unit (default is MiB).'", "the given serveradmin function offline', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true',", "arguments in here. It # is not really meaningful to", "system', ) subparser.add_argument( '--unretire', nargs='?', const='maintenance', help='Unretire a VM, set", "WARNING = '\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m' class", "subparsers.add_parser( 'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname of the", "# We are summing up the silent and verbose arguments", "), ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change memory, and", "logging import StreamHandler, root as root_logger import time from fabric.network", "IPv4 address of VM' ) ) subparser.add_argument( '--offline', action='store_true', help='Perform", "not wait for guest to shutdown gracefully', ) subparser.add_argument( '--retire',", "help='Run puppet in debug mode', ) subparser.add_argument( '--offline', action='store_true', help='Force", "'New disk size with an optional unit (default GiB). 
'", "args.pop('func')(**args) finally: # Fabric requires the disconnect function to be", "dest='rebuild', action='store_true', help='Rebuild already defined VM or build it if", "the guest system', ) subparser.add_argument( '--postboot', metavar='postboot_script', help='Run postboot_script on", "gracefully', ) subparser.add_argument( '--no-redefine', action='store_true', help='Do not redefine the domain", "the guest system', ) subparser.add_argument( '--retire', action='store_true', help='Set VM state", "online_reserved', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences so", "subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "def format_help(self): if not any(isinstance(a, _SubParsersAction) for a in self._actions):", "safe # than sorry. for subparsers_action in subparsers_actions: # Get", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--unretire', nargs='?',", "class ColorFormatters(): BOLD = '\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m' ERROR =", "action='store_true', help='Force offline migration', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow", "the state online_reserved', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all", "subparser.add_argument('vm_hostname', help='Hostname of the guest system') return vars(top_parser.parse_args()) def main():", "'Use this if igvm fails to find a matching Hypervisor,", "Fabric requires the disconnect function to be called after every", "__init__(self, *args, **kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty = self.stream.isatty() def", "unit (default is MiB).' 'Only integers are allowed.' ), )", "specified relative with \"+\". 
Only integers are allowed' ) )", "fabric.network import disconnect_all from igvm.commands import ( change_address, disk_set, evacuate,", ") subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "# # [1] https://docs.python.org/library/logging.html#logging-levels level = 20 + (silent -", "check why it fails finding a Hypervisor.', ) subparser =", "level = record.levelname msg = '{}: {}: {}'.format(level, record.name, record.getMessage())", "import ( change_address, disk_set, evacuate, host_info, mem_set, vcpu_set, vm_build, vm_delete,", ") subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "'Specify drbd (default), netcat or xfs transport to migrate '", "level in vars(ColorFormatters): msg = getattr(ColorFormatters, level).format(msg) return msg def", "not redefine the domain to use latest hypervisor settings', )", "# better. See Python logging library documentation [1] for the", "not excluded. ' 'Use this if igvm fails to find", "VM for 24h.' 
), ) subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build", "has the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd', 'netcat',", ") ) subparser = subparsers.add_parser( 'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument(", "subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change CPUs, and restart VM',", "subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build or migrate VM only to", "'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "from igvm.libvirt import close_virtconns class ColorFormatters(): BOLD = '\\033[1m{}\\033[0m' WARNING", "for a in self._actions): return super(IGVMArgumentParser, self).format_help() out = []", "const='maintenance', help='Unretire a VM, set it to given state, maintenance", "not really meaningful to use them both, but giving an", "it for one level higher. 
# # [1] https://docs.python.org/library/logging.html#logging-levels level", "in subparser .get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend", ") subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change CPUs, and restart", "default=None, help='Hostname of destination hypervisor', ) subparser.add_argument( '--dry-run', action='store_true', help='Do", "of the guest system', ) subparser.add_argument( 'size', help=( 'New memory", "format(self, record): level = record.levelname msg = '{}: {}: {}'.format(level,", "= subparsers.add_parser( 'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname of", "of the guest system', ) subparser.add_argument( 'size', help=( 'New disk", "not wait for guest to shutdown gracefully', ) subparser.add_argument( '--no-redefine',", "is not # better. 
See Python logging library documentation [1]", "of destination hypervisor', ) subparser.add_argument( '--dry-run', action='store_true', help='Do not migrate", "vars(ColorFormatters): msg = getattr(ColorFormatters, level).format(msg) return msg def parse_args(): top_parser", "has the state online_reserved', ) subparser.add_argument( '--rebuild', dest='rebuild', action='store_true', help='Rebuild", "wait for' ' operator to shut down VM for 24h.'", "state, maintenance by default', ) subparser = subparsers.add_parser( 'stop', description=vm_stop.__doc__,", "subparser = subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname", "subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "running', ) subparser = subparsers.add_parser( 'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument(", ") subparser = subparsers.add_parser( 'define', description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname", "from igvm.commands import ( change_address, disk_set, evacuate, host_info, mem_set, vcpu_set,", "[] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions = [ action for action", "type=int, help='Resize disk of migrated VM. Expects new size in", "is called. We are # sleeping for a little while", "the hypervisor', ) subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None, help='Hostname of destination", "help='Resize disk of migrated VM. 
Expects new size in GiB.", "puppet in chroot before powering up', ) subparser.add_argument( '--debug-puppet', action='store_true',", "igvm fails to find a matching Hypervisor, but you '", "' 'Use this if igvm fails to find a matching", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'size', help=(", "# Get all subparsers and print help for choice, subparser", ") subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet', help='Skip running puppet in chroot", "subparser = subparsers.add_parser( 'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname", "vm_sync, vm_define, ) from igvm.libvirt import close_virtconns class ColorFormatters(): BOLD", "= subparsers.add_parser( 'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname of", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--retire', action='store_true',", "self).__init__(*args, **kwargs) self.isatty = self.stream.isatty() def format(self, record): level =", "'New IPv4 address of VM' ) ) subparser.add_argument( '--offline', action='store_true',", "system', ) subparser.add_argument( 'size', help=( 'New disk size with an", "action='store_true', help='Shutdown VM, change CPUs, and restart VM', ) subparser", "subparser = subparsers.add_parser( 'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname", "system', ) subparser.add_argument( '--retire', action='store_true', help='Set VM state to \"retired\"", "it fails finding a Hypervisor.', ) subparser = subparsers.add_parser( 'change-address',", "GiB). ' 'Can be specified relative with \"+\". 
Only integers", "'--offline', action='store_true', help='Shutdown VM, if running', ) subparser = subparsers.add_parser(", "VM', ) subparser = subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument(", "than sorry. for subparsers_action in subparsers_actions: # Get all subparsers", "as root_logger import time from fabric.network import disconnect_all from igvm.commands", ") subparser.add_argument( '--offline-transport', default='drbd', help=( 'Specify drbd (default) or netcat", "subparser.add_argument( '--offline', nargs='*', help='Migrate VMs matching the given serveradmin function", "to given state, maintenance by default', ) subparser = subparsers.add_parser(", "migration to a Host which has the state online_reserved', )", "--verbose to check why it fails finding a Hypervisor.', )", "subparser.add_argument( '--run-puppet', action='store_true', help='Run puppet in chroot before powering up',", ") subparser = subparsers.add_parser( 'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname',", "guest system', ) subparser.add_argument( '--force', action='store_true', help='Do not wait for", "disk size with an optional unit (default GiB). 
' 'Can", "'--unretire', nargs='?', const='maintenance', help='Unretire a VM, set it to given", "subparsers = top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser( 'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build)", "'--postboot', metavar='postboot_script', help='Run postboot_script on the guest after first boot',", "offline migration, igvm will wait for' ' operator to shut", ") subparser.add_argument( '--offline', nargs='*', help='Migrate VMs matching the given serveradmin", "'--rebuild', dest='rebuild', action='store_true', help='Rebuild already defined VM or build it", "'hv_hostname', help='Hostname of the hypervisor', ) subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None,", "), ) subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build or migrate VM", "'--offline', action='store_true', help='Shutdown VM, change memory, and restart VM', )", "'\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m'", "action='store_true', help='Run puppet in debug mode', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv',", "called after every # use. 
We are also taking our", "a matching Hypervisor, but you ' 'are in urgent need", "record): level = record.levelname msg = '{}: {}: {}'.format(level, record.name,", "system', ) subparser.add_argument( 'size', help=( 'New memory size with optional", "subparser = subparsers.add_parser( 'rename', description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname", "args.pop('verbose')) try: args.pop('func')(**args) finally: # Fabric requires the disconnect function", "= subparsers.add_parser( 'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname of", "subparser.add_argument( 'size', help=( 'New disk size with an optional unit", "line interface Copyright (c) 2017 InnoGames GmbH \"\"\" from __future__", "state to \"retired\" on Serveradmin instead of deleting', ) subparser", "guest system', ) subparser.add_argument( 'size', help=( 'New disk size with", "import print_function from argparse import ArgumentParser, _SubParsersAction from logging import", "change memory, and restart VM', ) subparser = subparsers.add_parser( 'vcpu-set',", "to shut down VM for 24h.' ), ) subparser.add_argument( '--enforce-vm-env',", "= '\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m' CRITICAL =", "subparser.add_argument( '--debug-puppet', action='store_true', help='Run puppet in debug mode', ) subparser.add_argument(", "image' ), ) subparser.add_argument( '--no-shutdown', action='store_true', help=( 'Don\\'t shutdown VM", "subparser.add_argument( 'count', type=int, help='New number of CPUs', ) subparser.add_argument( '--offline',", "has the state online_reserved', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules", "probably only be one subparser_action, but better safe # than", "error is not # better. 
See Python logging library documentation", "argparse import ArgumentParser, _SubParsersAction from logging import StreamHandler, root as", "help='Hostname of the guest system', ) subparser.add_argument( '--unretire', nargs='?', const='maintenance',", "'\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def format_help(self): if not any(isinstance(a, _SubParsersAction) for", "parse_args(): top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count', default=0) top_parser.add_argument('--verbose', '-v',", "Get all subparsers and print help for choice, subparser in", "netcat or xfs transport to migrate ' 'disk image' ),", "changing IP address', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration", "the guest system', ) subparser.add_argument( '--unretire', nargs='?', const='maintenance', help='Unretire a", "domain to use latest hypervisor settings', ) subparser = subparsers.add_parser(", "subparser.add_argument( 'hv_hostname', help='Hostname of the hypervisor', ) subparser.add_argument( 'dst_hv_hostname', nargs='?',", "migrate VM only to a HV with the same environment", "'--debug-puppet', action='store_true', help='Run puppet in debug mode', ) subparser.add_argument( '--offline',", "number of CPUs', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change", "why it fails finding a Hypervisor.', ) subparser = subparsers.add_parser(", "after every # use. 
We are also taking our chance", "configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args) finally: # Fabric requires the disconnect", "'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "boot', ) subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet', help='Skip running puppet in", "We are summing up the silent and verbose arguments in", "), ) subparser = subparsers.add_parser( 'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument(", "to do it anyway. Hint: If igvm fails to find", "guest system', ) subparser.add_argument( '--postboot', metavar='postboot_script', help='Run postboot_script on the", "same environment of VM' ) subparser.add_argument( '--disk-size', dest='disk_size', type=int, help='Resize", "nargs='?', default=None, help='Hostname of destination hypervisor', ) subparser.add_argument( '--run-puppet', action='store_true',", ") subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None, help='Hostname of destination hypervisor', )", "'--offline-transport', default='drbd', choices=('drbd', 'netcat', 'xfs'), help=( 'Specify drbd (default), netcat", "'--run-puppet', action='store_true', help='Run puppet in chroot before powering up', )", "'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "help=( 'Specify drbd (default) or netcat transport to migrate disk", "default=0) subparsers = top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser( 'build', description=vm_build.__doc__, )", "'dst_hv_hostname', nargs='?', default=None, help='Hostname of destination hypervisor', ) subparser.add_argument( '--dry-run',", "{}: {}'.format(level, record.name, record.getMessage()) if self.isatty and level in vars(ColorFormatters):", 
"isinstance(action, _SubParsersAction) ] # There will probably only be one", "of CPUs', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change CPUs,", "finding a Hypervisor.', ) subparser = subparsers.add_parser( 'define', description=vm_define.__doc__, )", "= subparsers.add_parser( 'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname of", "msg = getattr(ColorFormatters, level).format(msg) return msg def parse_args(): top_parser =", "'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "subparsers.add_parser( 'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname of the", "igvm ' 'with --verbose to check why it fails finding", "for subparsers_action in subparsers_actions: # Get all subparsers and print", "VM, set it to given state, maintenance by default', )", "guest system', ) subparser.add_argument( 'size', help=( 'New memory size with", "the guest after first boot', ) subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet',", "for' ' operator to shut down VM for 24h.' 
),", "to a Host which has the state online_reserved', ) subparser.add_argument(", "netcat transport to migrate disk image' ), ) subparser =", "Only integers are allowed' ) ) subparser = subparsers.add_parser( 'mem-set',", "be done' ) subparser.add_argument( '--offline', nargs='*', help='Migrate VMs matching the", "out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to format messages", "**kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty = self.stream.isatty() def format(self, record):", "by default', ) subparser = subparsers.add_parser( 'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop)", "the disconnect function is called. We are # sleeping for", "'a matching Hypervisor something might be really wrong. Run igvm", "= top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser( 'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument(", "memory, and restart VM', ) subparser = subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__,", "action='store_true', help='Do not redefine the domain to use latest hypervisor", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'hypervisor_hostname',", "guest system', ) subparser = subparsers.add_parser( 'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync)", "help='Hostname of the guest system') return vars(top_parser.parse_args()) def main(): args", "subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd', 'netcat', 'xfs'), help=( 'Specify drbd (default),", "higher. 
# # [1] https://docs.python.org/library/logging.html#logging-levels level = 20 + (silent", "deleting', ) subparser = subparsers.add_parser( 'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument(", "description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "vm_delete, vm_migrate, vm_rename, vm_restart, vm_start, vm_stop, vm_sync, vm_define, ) from", "anyway. Hint: If igvm fails to find ' 'a matching", "only with --offline --offline-transport=xfs', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules", "'hypervisor_hostname', nargs='?', default=None, help='Hostname of destination hypervisor', ) subparser.add_argument( '--run-puppet',", "nargs='*', help='Migrate VMs matching the given serveradmin function offline', )", "would be done' ) subparser.add_argument( '--offline', nargs='*', help='Migrate VMs matching", "state online_reserved', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences", "= IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count', default=0) top_parser.add_argument('--verbose', '-v', action='count', default=0)", "parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args) finally: # Fabric requires the", "Python logging library documentation [1] for the levels. # Paramiko", "'Works only with --offline --offline-transport=xfs', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true',", "one subparser_action, but better safe # than sorry. 
for subparsers_action", "disk_set, evacuate, host_info, mem_set, vcpu_set, vm_build, vm_delete, vm_migrate, vm_rename, vm_restart,", "subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "of the guest system', ) subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None, help='Hostname", "msg def parse_args(): top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count', default=0)", "our chance to disconnect from # the hypervisors. disconnect_all() close_virtconns()", "The command line interface Copyright (c) 2017 InnoGames GmbH \"\"\"", "Hypervisor, but you ' 'are in urgent need to do", "verbose arguments in here. It # is not really meaningful", "l in subparser .get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class IGVMLogHandler(StreamHandler):", "' 'are in urgent need to do it anyway. 
Hint:", "format messages short-cutting Formatters\"\"\" def __init__(self, *args, **kwargs): super(IGVMLogHandler, self).__init__(*args,", "subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "help='Hostname of the hypervisor', ) subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None, help='Hostname", "'new_address', help=( 'New IPv4 address of VM' ) ) subparser.add_argument(", "type=int, help='New number of CPUs', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown", "evacuate, host_info, mem_set, vcpu_set, vm_build, vm_delete, vm_migrate, vm_rename, vm_restart, vm_start,", "mode', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow building on a", "restart VM', ) subparser = subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set)", "StreamHandler, root as root_logger import time from fabric.network import disconnect_all", "if not any(isinstance(a, _SubParsersAction) for a in self._actions): return super(IGVMArgumentParser,", "msg = '{}: {}: {}'.format(level, record.name, record.getMessage()) if self.isatty and", "subparser = subparsers.add_parser( 'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--unretire',", "optional unit (default is MiB).' 'Only integers are allowed.' 
),", "\"retired\" on Serveradmin instead of deleting', ) subparser = subparsers.add_parser(", "subparser = subparsers.add_parser( 'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname', help='Hostname", "hypervisor', ) subparser.add_argument( '--dry-run', action='store_true', help='Do not migrate but just", "If igvm fails to find ' 'a matching Hypervisor something", "or xfs transport to migrate ' 'disk image' ), )", ") subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "it', ) subparser = subparsers.add_parser( 'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument(", "the silent and verbose arguments in here. It # is", "dest='allow_reserved_hv', action='store_true', help='Allow building on a Host which has the", "action='store_true', help='Do not wait for guest to shutdown gracefully', )", "IP address', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to", "import ArgumentParser, _SubParsersAction from logging import StreamHandler, root as root_logger", "to find a matching Hypervisor, but you ' 'are in", "wait for guest to shutdown gracefully', ) subparser.add_argument( '--retire', action='store_true',", "little while to avoid this. time.sleep(0.1) def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler())", ") subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None, help='Hostname of destination hypervisor', )", "a in self._actions): return super(IGVMArgumentParser, self).format_help() out = [] out.append(ColorFormatters.BOLD.format(__doc__))", "guest to shutdown gracefully', ) subparser.add_argument( '--no-redefine', action='store_true', help='Do not", "better. 
See Python logging library documentation [1] for the levels.", "subparser.add_argument( '--offline', action='store_true', help='Perform IP address change offline', ) subparser.add_argument(", "'--offline', nargs='*', help='Migrate VMs matching the given serveradmin function offline',", "subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for l in subparser", "running puppet in chroot before powering up', ) subparser.add_argument( '--debug-puppet',", "of the guest system', ) subparser.add_argument( 'new_hostname', help='New hostname', )", "'--force', action='store_true', help='Do not wait for guest to shutdown gracefully',", "subparsers and print help for choice, subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice))", "from logging import StreamHandler, root as root_logger import time from", "it fails finding a Hypervisor.', ) subparser = subparsers.add_parser( 'define',", "the state online_reserved', ) subparser.add_argument( '--rebuild', dest='rebuild', action='store_true', help='Rebuild already", "urgent need to do it anyway. Hint: If igvm fails", "'New memory size with optional unit (default is MiB).' 'Only", "Fabric, Paramiko, raises an error, on # destruction right after", "not migrate but just print what would be done' )", "self._actions if isinstance(action, _SubParsersAction) ] # There will probably only", "in here. 
It # is not really meaningful to use", "ERROR = '\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def format_help(self):", "has the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', help=( 'Specify", "= subparsers.add_parser( 'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname', help='Hostname of", "help='Set VM state to \"retired\" on Serveradmin instead of deleting',", "shutdown gracefully', ) subparser.add_argument( '--retire', action='store_true', help='Retire VM after stopping", "be really wrong. Run igvm ' 'with --verbose to check", "subparsers.add_parser( 'rename', description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname of the", ") subparser.add_argument( '--no-redefine', action='store_true', help='Do not redefine the domain to", "= subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname of", "- The command line interface Copyright (c) 2017 InnoGames GmbH", "offline migration', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to", "really wrong. 
Run igvm ' 'with --verbose to check why", "subparser .get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler", ") subparser = subparsers.add_parser( 'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname',", "redefine the domain to use latest hypervisor settings', ) subparser", "= subparsers.add_parser( 'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname of", "be one subparser_action, but better safe # than sorry. for", ") subparser.add_argument( '--unretire', nargs='?', const='maintenance', help='Unretire a VM, set it", "subparser.add_argument( 'new_hostname', help='New hostname', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM,", "subparser = subparsers.add_parser( 'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname", "online_reserved', ) subparser.add_argument( '--rebuild', dest='rebuild', action='store_true', help='Rebuild already defined VM", "help=( 'Don\\'t shutdown VM during offline migration, igvm will wait", "action='store_true', help='Shutdown VM, change memory, and restart VM', ) subparser", "if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for l in subparser .get_default('func').__doc__.strip().splitlines() ))", "subparsers.add_parser( 'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname of the", "is not really meaningful to use them both, but giving", "it if not defined', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules", "need to do it anyway. 
Hint: If igvm fails to", "subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, if running', ) subparser =", "Formatters\"\"\" def __init__(self, *args, **kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty =", "description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname', help='Hostname of the hypervisor', )", "BOLD = '\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m' CRITICAL", "subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to a Host which", "subparsers.add_parser( 'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname of the", "hypervisor', ) subparser.add_argument( '--run-puppet', action='store_true', help='Run puppet in chroot before", "[ action for action in self._actions if isinstance(action, _SubParsersAction) ]", "disconnect_all() close_virtconns() # The underlying library of Fabric, Paramiko, raises", "called. We are # sleeping for a little while to", "class IGVMArgumentParser(ArgumentParser): def format_help(self): if not any(isinstance(a, _SubParsersAction) for a", "do it anyway. Hint: If igvm fails to find '", "to \"retired\" on Serveradmin instead of deleting', ) subparser =", "20 + (silent - verbose) * 10 root_logger.setLevel(level) root_logger.getChild('paramiko').setLevel(level +", "close_virtconns class ColorFormatters(): BOLD = '\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m' ERROR", "= '{}: {}: {}'.format(level, record.name, record.getMessage()) if self.isatty and level", "with \"+\". Only integers are allowed' ) ) subparser =", "levels. # Paramiko is overly verbose. 
We configure it for", "\"\"\"Extend StreamHandler to format messages short-cutting Formatters\"\"\" def __init__(self, *args,", "subparser.add_argument( 'new_address', help=( 'New IPv4 address of VM' ) )", "migrate disk image' ), ) subparser = subparsers.add_parser( 'disk-set', description=disk_set.__doc__,", "help='Perform IP address change offline', ) subparser.add_argument( '--migrate', action='store_true', help='Migrate", "help='Unretire a VM, set it to given state, maintenance by", "*args, **kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty = self.stream.isatty() def format(self,", "subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for l in subparser .get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage()))", "disconnect from # the hypervisors. disconnect_all() close_virtconns() # The underlying", ") subparser = subparsers.add_parser( 'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname',", "not # better. See Python logging library documentation [1] for", "action='store_true', help='Retire VM after stopping it', ) subparser = subparsers.add_parser(", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--postboot',", ") subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build or migrate VM only", "fails finding a Hypervisor.', ) subparser = subparsers.add_parser( 'change-address', description=disk_set.__doc__,", "finally: # Fabric requires the disconnect function to be called", "help='Allow building on a Host which has the state online_reserved',", "something might be really wrong. 
Run igvm ' 'with --verbose", "finding a Hypervisor.', ) subparser = subparsers.add_parser( 'change-address', description=disk_set.__doc__, )", "description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "help='Migrate VMs matching the given serveradmin function offline', ) subparser.add_argument(", ") subparser.add_argument( 'size', help=( 'New memory size with optional unit", "the guest system', ) subparser.add_argument( 'size', help=( 'New disk size", "# The underlying library of Fabric, Paramiko, raises an error,", "this. time.sleep(0.1) def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) # We are summing", "igvm.commands import ( change_address, disk_set, evacuate, host_info, mem_set, vcpu_set, vm_build,", "hypervisor', ) subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None, help='Hostname of destination hypervisor',", "metavar='postboot_script', help='Run postboot_script on the guest after first boot', )", "vm_build, vm_delete, vm_migrate, vm_rename, vm_restart, vm_start, vm_stop, vm_sync, vm_define, )", "host which has the state online_reserved', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences',", "subparsers.add_parser( 'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname of the", "'--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences so that Hypervisors are", "subparser.add_argument( '--migrate', action='store_true', help='Migrate VM to new HV while changing", "vcpu_set, vm_build, vm_delete, vm_migrate, vm_rename, vm_restart, vm_start, vm_stop, vm_sync, vm_define,", "of the guest system', ) subparser.add_argument( '--force', action='store_true', help='Do not", "'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument( 
'hv_hostname', help='Hostname of the hypervisor',", "HV while changing IP address', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true',", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--force',", "online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', help=( 'Specify drbd (default) or", "= subparsers.add_parser( 'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname of", "# Fabric requires the disconnect function to be called after", "help='Skip running puppet in chroot before powering up', ) subparser.add_argument(", "in debug mode', ) subparser.add_argument( '--offline', action='store_true', help='Force offline migration',", "subparser = subparsers.add_parser( 'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname", "given serveradmin function offline', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow", "def __init__(self, *args, **kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty = self.stream.isatty()", "the guest system', ) subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None, help='Hostname of", ") subparser.add_argument( '--rebuild', dest='rebuild', action='store_true', help='Rebuild already defined VM or", "function offline', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migrating to", "disk image' ), ) subparser = subparsers.add_parser( 'disk-set', description=disk_set.__doc__, )", "'--skip-puppet', action='store_false', dest='run_puppet', help='Skip running puppet in chroot before powering", "subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow building on a Host 
which", "during offline migration, igvm will wait for' ' operator to", "also taking our chance to disconnect from # the hypervisors.", ") subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd', 'netcat', 'xfs'), help=( 'Specify drbd", "igvm.libvirt import close_virtconns class ColorFormatters(): BOLD = '\\033[1m{}\\033[0m' WARNING =", "nargs='?', default=None, help='Hostname of destination hypervisor', ) subparser.add_argument( '--dry-run', action='store_true',", "find ' 'a matching Hypervisor something might be really wrong.", "transport to migrate disk image' ), ) subparser = subparsers.add_parser(", "so that Hypervisors are not excluded. ' 'Use this if", "which has the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', help=(", "print what would be done' ) subparser.add_argument( '--offline', nargs='*', help='Migrate", "or build it if not defined', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences',", "# sleeping for a little while to avoid this. time.sleep(0.1)", "2017 InnoGames GmbH \"\"\" from __future__ import print_function from argparse", ".get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to", "subparser.add_argument( '--disk-size', dest='disk_size', type=int, help='Resize disk of migrated VM. Expects", "Hypervisor.', ) subparser = subparsers.add_parser( 'define', description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname',", "system', ) subparser.add_argument( 'new_hostname', help='New hostname', ) subparser.add_argument( '--offline', action='store_true',", "chance to disconnect from # the hypervisors. disconnect_all() close_virtconns() #", "memory size with optional unit (default is MiB).' 
'Only integers", "building on a Host which has the state online_reserved', )", "help='Shutdown VM, if running', ) subparser = subparsers.add_parser( 'evacuate', description=evacuate.__doc__,", "'count', type=int, help='New number of CPUs', ) subparser.add_argument( '--offline', action='store_true',", "of the guest system', ) subparser.add_argument( '--postboot', metavar='postboot_script', help='Run postboot_script", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'new_address',", "a Hypervisor.', ) subparser = subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate)", "subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet', help='Skip running puppet in chroot before", "verbose): root_logger.addHandler(IGVMLogHandler()) # We are summing up the silent and", "record.levelname msg = '{}: {}: {}'.format(level, record.name, record.getMessage()) if self.isatty", "action='store_true', help=( 'Don\\'t shutdown VM during offline migration, igvm will", "action='store_true', help='Do not migrate but just print what would be", "subparser = subparsers.add_parser( 'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname", "(default is MiB).' 'Only integers are allowed.' ), ) subparser.add_argument(", "super(IGVMArgumentParser, self).format_help() out = [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions =", "help='Hostname of the guest system', ) subparser = subparsers.add_parser( 'rename',", "vm_restart, vm_start, vm_stop, vm_sync, vm_define, ) from igvm.libvirt import close_virtconns", "Paramiko is overly verbose. 
We configure it for one level", "guest system', ) subparser.add_argument( 'count', type=int, help='New number of CPUs',", "for guest to shutdown gracefully', ) subparser.add_argument( '--retire', action='store_true', help='Retire", "= subparsers.add_parser( 'define', description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of the", "the hypervisors. disconnect_all() close_virtconns() # The underlying library of Fabric,", "for a little while to avoid this. time.sleep(0.1) def configure_root_logger(silent,", "Hypervisors are not excluded. ' 'Use this if igvm fails", "state online_reserved', ) subparser.add_argument( '--rebuild', dest='rebuild', action='store_true', help='Rebuild already defined", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'hypervisor_hostname', nargs='?',", "vm_rename, vm_restart, vm_start, vm_stop, vm_sync, vm_define, ) from igvm.libvirt import", "'--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to a Host which has", "and restart VM', ) subparser = subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, )", "subparser.add_argument( '--unretire', nargs='?', const='maintenance', help='Unretire a VM, set it to", "def parse_args(): top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count', default=0) top_parser.add_argument('--verbose',", "subparser.add_argument( '--retire', action='store_true', help='Set VM state to \"retired\" on Serveradmin", "description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of the guest system') return", "subparsers.add_parser( 'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname of the", "debug mode', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', 
action='store_true', help='Allow building on", "action='store_true', help='Overrules all preferences so that Hypervisors are not excluded.", "top_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers = top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser(", "will wait for' ' operator to shut down VM for", "in debug mode', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow building", "subparser.add_argument( '--no-shutdown', action='store_true', help=( 'Don\\'t shutdown VM during offline migration,", "description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "def main(): args = parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args) finally:", "= '\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def format_help(self): if", "the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', help=( 'Specify drbd", "default', ) subparser = subparsers.add_parser( 'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument(", "guest to shutdown gracefully', ) subparser.add_argument( '--retire', action='store_true', help='Retire VM", "nargs='?', const='maintenance', help='Unretire a VM, set it to given state,", "help='Do not redefine the domain to use latest hypervisor settings',", "transport to migrate ' 'disk image' ), ) subparser.add_argument( '--no-shutdown',", "help='Do not migrate but just print what would be done'", "default=None, help='Hostname of destination hypervisor', ) subparser.add_argument( '--run-puppet', action='store_true', help='Run", "an optional unit (default GiB). 
' 'Can be specified relative", "a host which has the state online_reserved', ) subparser.add_argument( '--soft-preferences',", "system', ) subparser.add_argument( 'count', type=int, help='New number of CPUs', )", "guest system') return vars(top_parser.parse_args()) def main(): args = parse_args() configure_root_logger(args.pop('silent'),", "already defined VM or build it if not defined', )", "= record.levelname msg = '{}: {}: {}'.format(level, record.name, record.getMessage()) if", "subparser.add_argument( '--offline', action='store_true', help='Force offline migration', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv',", "'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "'with --verbose to check why it fails finding a Hypervisor.',", "default='drbd', choices=('drbd', 'netcat', 'xfs'), help=( 'Specify drbd (default), netcat or", "hostname', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, if running', )", "the disconnect function to be called after every # use.", "'--disk-size', dest='disk_size', type=int, help='Resize disk of migrated VM. 
Expects new", ") subparser = subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname',", "subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname of the", "description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "if isinstance(action, _SubParsersAction) ] # There will probably only be", "= subparsers.add_parser( 'migrate', description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname of", "on Serveradmin instead of deleting', ) subparser = subparsers.add_parser( 'info',", "print help for choice, subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__:", "We are also taking our chance to disconnect from #", "main(): args = parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args) finally: #", "instead of deleting', ) subparser = subparsers.add_parser( 'info', description=host_info.__doc__, )", "the guest system', ) subparser.add_argument( 'new_hostname', help='New hostname', ) subparser.add_argument(", "a little while to avoid this. time.sleep(0.1) def configure_root_logger(silent, verbose):", "self).format_help() out = [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions = [", "top_parser.add_argument('--silent', '-s', action='count', default=0) top_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers =", "for the levels. # Paramiko is overly verbose. 
We configure", "a Host which has the state online_reserved', ) subparser.add_argument( '--offline-transport',", "'size', help=( 'New memory size with optional unit (default is", "= [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions = [ action for", ") subparser = subparsers.add_parser( 'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname',", "choice, subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for", "VM only to a HV with the same environment of", "'--no-redefine', action='store_true', help='Do not redefine the domain to use latest", "super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty = self.stream.isatty() def format(self, record): level", "+ (silent - verbose) * 10 root_logger.setLevel(level) root_logger.getChild('paramiko').setLevel(level + 10)", "interface Copyright (c) 2017 InnoGames GmbH \"\"\" from __future__ import", "import StreamHandler, root as root_logger import time from fabric.network import", "drbd (default), netcat or xfs transport to migrate ' 'disk", ") subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "from __future__ import print_function from argparse import ArgumentParser, _SubParsersAction from", "from # the hypervisors. 
disconnect_all() close_virtconns() # The underlying library", ") ) subparser.add_argument( '--offline', action='store_true', help='Perform IP address change offline',", "wait for guest to shutdown gracefully', ) subparser.add_argument( '--no-redefine', action='store_true',", ") subparser = subparsers.add_parser( 'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname',", "description=vm_migrate.__doc__, ) subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", ") subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "a Host which has the state online_reserved', ) subparser.add_argument( '--rebuild',", "It # is not really meaningful to use them both,", "subparser = subparsers.add_parser( 'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname", "subparser.add_argument( '--dry-run', action='store_true', help='Do not migrate but just print what", ") subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of the guest system') return vars(top_parser.parse_args())", "--offline --offline-transport=xfs', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences", "vars(top_parser.parse_args()) def main(): args = parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args)", "' 'disk image' ), ) subparser.add_argument( '--no-shutdown', action='store_true', help=( 'Don\\'t", "from fabric.network import disconnect_all from igvm.commands import ( change_address, disk_set,", ") subparser.add_argument( '--run-puppet', action='store_true', help='Run puppet in chroot before powering", "We are # sleeping for a little while to avoid", 
"help='Hostname of the guest system', ) subparser.add_argument( '--postboot', metavar='postboot_script', help='Run", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'new_hostname', help='New", "subparser_action, but better safe # than sorry. for subparsers_action in", "dest='soft_preferences', action='store_true', help='Overrules all preferences so that Hypervisors are not", "size in GiB. ' 'Works only with --offline --offline-transport=xfs', )", "subparsers.add_parser( 'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname of the", "try: args.pop('func')(**args) finally: # Fabric requires the disconnect function to", "messages short-cutting Formatters\"\"\" def __init__(self, *args, **kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs)", "with an optional unit (default GiB). ' 'Can be specified", "description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "be called after every # use. We are also taking", "subparsers.add_parser( 'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname of the", "getattr(ColorFormatters, level).format(msg) return msg def parse_args(): top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent',", "help='Run postboot_script on the guest after first boot', ) subparser.add_argument(", "_SubParsersAction from logging import StreamHandler, root as root_logger import time", "_SubParsersAction) ] # There will probably only be one subparser_action,", "= self.stream.isatty() def format(self, record): level = record.levelname msg =", "powering up', ) subparser.add_argument( '--debug-puppet', action='store_true', help='Run puppet in debug", "in urgent need to do it anyway. 
Hint: If igvm", "VM', ) subparser = subparsers.add_parser( 'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument(", "description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "vm_migrate, vm_rename, vm_restart, vm_start, vm_stop, vm_sync, vm_define, ) from igvm.libvirt", "are # sleeping for a little while to avoid this.", "subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None, help='Hostname of destination hypervisor', ) subparser.add_argument(", "is MiB).' 'Only integers are allowed.' ), ) subparser.add_argument( '--offline',", "(c) 2017 InnoGames GmbH \"\"\" from __future__ import print_function from", "dest='enforce_vm_env', action='store_true', help='Build or migrate VM only to a HV", "help='Hostname of the guest system', ) subparser.add_argument( 'new_hostname', help='New hostname',", "description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "matching Hypervisor something might be really wrong. Run igvm '", "help='Allow migrating to a host which has the state online_reserved',", "with optional unit (default is MiB).' 
'Only integers are allowed.'", "VM or build it if not defined', ) subparser.add_argument( '--soft-preferences',", "to check why it fails finding a Hypervisor.', ) subparser", "subparsers_actions = [ action for action in self._actions if isinstance(action,", "help='Allow migration to a Host which has the state online_reserved',", "integers are allowed' ) ) subparser = subparsers.add_parser( 'mem-set', description=mem_set.__doc__,", "subparsers.add_parser( 'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname of the", ") subparser.add_argument( '--debug-puppet', action='store_true', help='Run puppet in debug mode', )", "'--migrate', action='store_true', help='Migrate VM to new HV while changing IP", "See Python logging library documentation [1] for the levels. #", "return msg def parse_args(): top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count',", "system', ) subparser.add_argument( '--postboot', metavar='postboot_script', help='Run postboot_script on the guest", "'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "help='Hostname of the guest system', ) subparser = subparsers.add_parser( 'sync',", "but better safe # than sorry. for subparsers_action in subparsers_actions:", "help='Hostname of destination hypervisor', ) subparser.add_argument( '--run-puppet', action='store_true', help='Run puppet", "overly verbose. We configure it for one level higher. #", "action in self._actions if isinstance(action, _SubParsersAction) ] # There will", "relative with \"+\". 
Only integers are allowed' ) ) subparser", "subparser = subparsers.add_parser( 'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start) subparser.add_argument( 'vm_hostname', help='Hostname", "migration, igvm will wait for' ' operator to shut down", "are not excluded. ' 'Use this if igvm fails to", "help='Build or migrate VM only to a HV with the", "puppet in debug mode', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow", "\"+\". Only integers are allowed' ) ) subparser = subparsers.add_parser(", "system', ) subparser.add_argument( '--force', action='store_true', help='Do not wait for guest", "wrong. Run igvm ' 'with --verbose to check why it", "guest after first boot', ) subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet', help='Skip", "change offline', ) subparser.add_argument( '--migrate', action='store_true', help='Migrate VM to new", "up', ) subparser.add_argument( '--debug-puppet', action='store_true', help='Run puppet in debug mode',", "'new_hostname', help='New hostname', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, if", "' 'with --verbose to check why it fails finding a", "stopping it', ) subparser = subparsers.add_parser( 'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart)", "description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "restart VM', ) subparser = subparsers.add_parser( 'start', description=vm_start.__doc__, ) subparser.set_defaults(func=vm_start)", "VMs matching the given serveradmin function offline', ) subparser.add_argument( '--ignore-reserved',", "# [1] https://docs.python.org/library/logging.html#logging-levels level = 20 + (silent - verbose)", "= '\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m' class 
IGVMArgumentParser(ArgumentParser):", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'new_address', help=(", "system', ) subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None, help='Hostname of destination hypervisor',", "disconnect function to be called after every # use. We", "after stopping it', ) subparser = subparsers.add_parser( 'restart', description=vm_restart.__doc__, )", ") subparser.add_argument( 'new_address', help=( 'New IPv4 address of VM' )", "if igvm fails to find a matching Hypervisor, but you", "= subparsers.add_parser( 'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname of", "dest='disk_size', type=int, help='Resize disk of migrated VM. Expects new size", "guest system', ) subparser.add_argument( '--retire', action='store_true', help='Set VM state to", "description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "VM' ) ) subparser.add_argument( '--offline', action='store_true', help='Perform IP address change", "' 'a matching Hypervisor something might be really wrong. 
Run", "the guest system', ) subparser.add_argument( 'size', help=( 'New memory size", "matching the given serveradmin function offline', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv',", "action='store_true', help='Allow migration to a Host which has the state", "to migrate ' 'disk image' ), ) subparser.add_argument( '--no-shutdown', action='store_true',", "import time from fabric.network import disconnect_all from igvm.commands import (", "environment of VM' ) subparser.add_argument( '--disk-size', dest='disk_size', type=int, help='Resize disk", "Paramiko, raises an error, on # destruction right after the", "action='store_true', help='Build or migrate VM only to a HV with", "def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) # We are summing up the", "self._actions): return super(IGVMArgumentParser, self).format_help() out = [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n')", "of migrated VM. Expects new size in GiB. ' 'Works", "action='store_true', help='Set VM state to \"retired\" on Serveradmin instead of", "] # There will probably only be one subparser_action, but", "address of VM' ) ) subparser.add_argument( '--offline', action='store_true', help='Perform IP", "self.isatty and level in vars(ColorFormatters): msg = getattr(ColorFormatters, level).format(msg) return", "for guest to shutdown gracefully', ) subparser.add_argument( '--no-redefine', action='store_true', help='Do", "which has the state online_reserved', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true',", "subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "is overly verbose. 
We configure it for one level higher.", "shutdown gracefully', ) subparser.add_argument( '--no-redefine', action='store_true', help='Do not redefine the", "default=0) top_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers = top_parser.add_subparsers(help='Actions') subparser =", "migration', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to a", "new size in GiB. ' 'Works only with --offline --offline-transport=xfs',", "= 20 + (silent - verbose) * 10 root_logger.setLevel(level) root_logger.getChild('paramiko').setLevel(level", "subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for l", "on the guest after first boot', ) subparser.add_argument( '--skip-puppet', action='store_false',", "shut down VM for 24h.' ), ) subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env',", "help=( 'Specify drbd (default), netcat or xfs transport to migrate", "out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions = [ action for action in", "a HV with the same environment of VM' ) subparser.add_argument(", "HV with the same environment of VM' ) subparser.add_argument( '--disk-size',", "of the guest system', ) subparser = subparsers.add_parser( 'rename', description=vm_rename.__doc__,", "destination hypervisor', ) subparser.add_argument( '--dry-run', action='store_true', help='Do not migrate but", "subparsers_action in subparsers_actions: # Get all subparsers and print help", "help='Shutdown VM, change CPUs, and restart VM', ) subparser =", "'rename', description=vm_rename.__doc__, ) subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "for l in subparser .get_default('func').__doc__.strip().splitlines() )) 
out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out) class", "igvm will wait for' ' operator to shut down VM", ") subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "up the silent and verbose arguments in here. It #", "= subparsers.add_parser( 'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname of", "preferences so that Hypervisors are not excluded. ' 'Use this", "of deleting', ) subparser = subparsers.add_parser( 'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info)", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'count', type=int,", "change CPUs, and restart VM', ) subparser = subparsers.add_parser( 'start',", "of VM' ) subparser.add_argument( '--disk-size', dest='disk_size', type=int, help='Resize disk of", "time.sleep(0.1) def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) # We are summing up", "'vm_hostname', help='Hostname of the guest system', ) subparser = subparsers.add_parser(", "VM state to \"retired\" on Serveradmin instead of deleting', )", "requires the disconnect function to be called after every #", "meaningful to use them both, but giving an error is", "documentation [1] for the levels. # Paramiko is overly verbose.", "# use. 
We are also taking our chance to disconnect", "mem_set, vcpu_set, vm_build, vm_delete, vm_migrate, vm_rename, vm_restart, vm_start, vm_stop, vm_sync,", "'-s', action='count', default=0) top_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers = top_parser.add_subparsers(help='Actions')", "description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", ") subparser = subparsers.add_parser( 'stop', description=vm_stop.__doc__, ) subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname',", "description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system',", "for choice, subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip())", "online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd', 'netcat', 'xfs'), help=( 'Specify", "subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of the guest system') return vars(top_parser.parse_args()) def", "= [ action for action in self._actions if isinstance(action, _SubParsersAction)", "migrated VM. Expects new size in GiB. 
' 'Works only", "all subparsers and print help for choice, subparser in subparsers_action.choices.items():", "to format messages short-cutting Formatters\"\"\" def __init__(self, *args, **kwargs): super(IGVMLogHandler,", "settings', ) subparser = subparsers.add_parser( 'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument(", "GmbH \"\"\" from __future__ import print_function from argparse import ArgumentParser,", "short-cutting Formatters\"\"\" def __init__(self, *args, **kwargs): super(IGVMLogHandler, self).__init__(*args, **kwargs) self.isatty", "puppet in debug mode', ) subparser.add_argument( '--offline', action='store_true', help='Force offline", "gracefully', ) subparser.add_argument( '--retire', action='store_true', help='Retire VM after stopping it',", "GiB. ' 'Works only with --offline --offline-transport=xfs', ) subparser.add_argument( '--soft-preferences',", "to a host which has the state online_reserved', ) subparser.add_argument(", "sorry. 
for subparsers_action in subparsers_actions: # Get all subparsers and", "in self._actions if isinstance(action, _SubParsersAction) ] # There will probably", "InnoGames GmbH \"\"\" from __future__ import print_function from argparse import", ") subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences so that", "# There will probably only be one subparser_action, but better", "error, on # destruction right after the disconnect function is", ") subparser.add_argument( '--disk-size', dest='disk_size', type=int, help='Resize disk of migrated VM.", "it fails finding a Hypervisor.', ) subparser = subparsers.add_parser( 'migrate',", "VM' ) subparser.add_argument( '--disk-size', dest='disk_size', type=int, help='Resize disk of migrated", "subparser = subparsers.add_parser( 'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname", "'--retire', action='store_true', help='Set VM state to \"retired\" on Serveradmin instead", "'are in urgent need to do it anyway. Hint: If", ") from igvm.libvirt import close_virtconns class ColorFormatters(): BOLD = '\\033[1m{}\\033[0m'", ") subparser.add_argument( 'new_hostname', help='New hostname', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown", "top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s', action='count', default=0) top_parser.add_argument('--verbose', '-v', action='count',", "avoid this. time.sleep(0.1) def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) # We are", "guest system', ) subparser.add_argument( '--unretire', nargs='?', const='maintenance', help='Unretire a VM,", "allowed.' ), ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change memory,", "the levels. # Paramiko is overly verbose. 
We configure it", "'\\n'.join(out) class IGVMLogHandler(StreamHandler): \"\"\"Extend StreamHandler to format messages short-cutting Formatters\"\"\"", "operator to shut down VM for 24h.' ), ) subparser.add_argument(", "of the guest system', ) subparser = subparsers.add_parser( 'sync', description=vm_sync.__doc__,", "root_logger import time from fabric.network import disconnect_all from igvm.commands import", "VM, if running', ) subparser = subparsers.add_parser( 'evacuate', description=evacuate.__doc__, )", "function to be called after every # use. We are", ") subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "import close_virtconns class ColorFormatters(): BOLD = '\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m'", "guest system', ) subparser.add_argument( 'new_hostname', help='New hostname', ) subparser.add_argument( '--offline',", "an error, on # destruction right after the disconnect function", "The underlying library of Fabric, Paramiko, raises an error, on", ") subparser = subparsers.add_parser( 'vcpu-set', description=vcpu_set.__doc__, ) subparser.set_defaults(func=vcpu_set) subparser.add_argument( 'vm_hostname',", "level = 20 + (silent - verbose) * 10 root_logger.setLevel(level)", "guest system', ) subparser.add_argument( 'hypervisor_hostname', nargs='?', default=None, help='Hostname of destination", "'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "help='Rebuild already defined VM or build it if not defined',", "help='Migrate VM to new HV while changing IP address', )", "'--offline', action='store_true', help='Force offline migration', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true',", ") subparser.add_argument( '--retire', action='store_true', help='Retire VM after stopping it', )", "it anyway. 
Hint: If igvm fails to find ' 'a", "subparser = subparsers.add_parser( 'restart', description=vm_restart.__doc__, ) subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname", "time from fabric.network import disconnect_all from igvm.commands import ( change_address,", "from argparse import ArgumentParser, _SubParsersAction from logging import StreamHandler, root", "VM, change CPUs, and restart VM', ) subparser = subparsers.add_parser(", "'\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m' CRITICAL = '\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def", "to a HV with the same environment of VM' )", "library documentation [1] for the levels. # Paramiko is overly", "--offline-transport=xfs', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences so", "VM to new HV while changing IP address', ) subparser.add_argument(", "VM, change memory, and restart VM', ) subparser = subparsers.add_parser(", "system', ) subparser.add_argument( 'new_address', help=( 'New IPv4 address of VM'", "offline', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migrating to a", "help for choice, subparser in subparsers_action.choices.items(): out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join(", "VM after stopping it', ) subparser = subparsers.add_parser( 'restart', description=vm_restart.__doc__,", "after first boot', ) subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet', help='Skip running", "'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "giving an error is not # better. See Python logging", "but just print what would be done' ) subparser.add_argument( '--offline',", "in GiB. 
' 'Works only with --offline --offline-transport=xfs', ) subparser.add_argument(", "ColorFormatters(): BOLD = '\\033[1m{}\\033[0m' WARNING = '\\033[1;33m{}\\033[0m' ERROR = '\\033[1;31m{}\\033[0m'", "defined', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences so", "CPUs', ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change CPUs, and", "integers are allowed.' ), ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM,", "CPUs, and restart VM', ) subparser = subparsers.add_parser( 'start', description=vm_start.__doc__,", "'Can be specified relative with \"+\". Only integers are allowed'", "set it to given state, maintenance by default', ) subparser", "choices=('drbd', 'netcat', 'xfs'), help=( 'Specify drbd (default), netcat or xfs", "dest='allow_reserved_hv', action='store_true', help='Allow migration to a Host which has the", "level).format(msg) return msg def parse_args(): top_parser = IGVMArgumentParser('igvm') top_parser.add_argument('--silent', '-s',", "'--offline-transport', default='drbd', help=( 'Specify drbd (default) or netcat transport to", "default='drbd', help=( 'Specify drbd (default) or netcat transport to migrate", "subparsers.add_parser( 'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname', help='Hostname of the", "help='Hostname of the guest system', ) subparser.add_argument( '--retire', action='store_true', help='Set", ") subparser = subparsers.add_parser( 'sync', description=vm_sync.__doc__, ) subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname',", "the guest system', ) subparser.add_argument( 'new_address', help=( 'New IPv4 address", "but you ' 'are in urgent need to do it", "only to a HV with the same environment of VM'", "to use latest hypervisor settings', ) subparser = subparsers.add_parser( 'delete',", "to disconnect from # the 
hypervisors. disconnect_all() close_virtconns() # The", ") subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, if running', ) subparser", "migrate ' 'disk image' ), ) subparser.add_argument( '--no-shutdown', action='store_true', help=(", "to avoid this. time.sleep(0.1) def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) # We", ") subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to a Host", "Run igvm ' 'with --verbose to check why it fails", ") subparser.add_argument( 'size', help=( 'New disk size with an optional", "Expects new size in GiB. ' 'Works only with --offline", "VM. Expects new size in GiB. ' 'Works only with", "subparser.set_defaults(func=vm_sync) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser", "'define', description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of the guest system')", "state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', help=( 'Specify drbd (default)", "top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser( 'build', description=vm_build.__doc__, ) subparser.set_defaults(func=vm_build) subparser.add_argument( 'vm_hostname',", "'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( '--postboot', metavar='postboot_script',", "action='store_false', dest='run_puppet', help='Skip running puppet in chroot before powering up',", "address', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration to a", "subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "if running', ) subparser = subparsers.add_parser( 'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate)", 
"migrate but just print what would be done' ) subparser.add_argument(", "system') return vars(top_parser.parse_args()) def main(): args = parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose'))", "in chroot before powering up', ) subparser.add_argument( '--debug-puppet', action='store_true', help='Run", "'--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migrating to a host which has", "of the hypervisor', ) subparser.add_argument( 'dst_hv_hostname', nargs='?', default=None, help='Hostname of", "disconnect_all from igvm.commands import ( change_address, disk_set, evacuate, host_info, mem_set,", "action='store_true', help='Allow building on a Host which has the state", "down VM for 24h.' ), ) subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true',", "= subparsers.add_parser( 'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname of", "defined VM or build it if not defined', ) subparser.add_argument(", "_SubParsersAction) for a in self._actions): return super(IGVMArgumentParser, self).format_help() out =", "first boot', ) subparser.add_argument( '--skip-puppet', action='store_false', dest='run_puppet', help='Skip running puppet", "fails to find a matching Hypervisor, but you ' 'are", "There will probably only be one subparser_action, but better safe", "args = parse_args() configure_root_logger(args.pop('silent'), args.pop('verbose')) try: args.pop('func')(**args) finally: # Fabric", "it to given state, maintenance by default', ) subparser =", "and restart VM', ) subparser = subparsers.add_parser( 'start', description=vm_start.__doc__, )", "given state, maintenance by default', ) subparser = subparsers.add_parser( 'stop',", "you ' 'are in urgent need to do it anyway.", "host_info, mem_set, vcpu_set, vm_build, vm_delete, vm_migrate, vm_rename, vm_restart, vm_start, vm_stop,", "self.stream.isatty() def 
format(self, record): level = record.levelname msg = '{}:", "in subparsers_actions: # Get all subparsers and print help for", "state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd', 'netcat', 'xfs'), help=(", "taking our chance to disconnect from # the hypervisors. disconnect_all()", "which has the state online_reserved', ) subparser.add_argument( '--rebuild', dest='rebuild', action='store_true',", "record.getMessage()) if self.isatty and level in vars(ColorFormatters): msg = getattr(ColorFormatters,", "configure it for one level higher. # # [1] https://docs.python.org/library/logging.html#logging-levels", "subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all preferences so that Hypervisors", ") subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "vm_define, ) from igvm.libvirt import close_virtconns class ColorFormatters(): BOLD =", "that Hypervisors are not excluded. ' 'Use this if igvm", "help='Force offline migration', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migration", "out.append('Available commands:\\n') subparsers_actions = [ action for action in self._actions", "(default GiB). ' 'Can be specified relative with \"+\". 
Only", "allowed' ) ) subparser = subparsers.add_parser( 'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set)", "are summing up the silent and verbose arguments in here.", "really meaningful to use them both, but giving an error", "to find ' 'a matching Hypervisor something might be really", "out = [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available commands:\\n') subparsers_actions = [ action", "'disk image' ), ) subparser.add_argument( '--no-shutdown', action='store_true', help=( 'Don\\'t shutdown", "to shutdown gracefully', ) subparser.add_argument( '--retire', action='store_true', help='Retire VM after", "'-v', action='count', default=0) subparsers = top_parser.add_subparsers(help='Actions') subparser = subparsers.add_parser( 'build',", "a Hypervisor.', ) subparser = subparsers.add_parser( 'change-address', description=disk_set.__doc__, ) subparser.set_defaults(func=change_address)", "of VM' ) ) subparser.add_argument( '--offline', action='store_true', help='Perform IP address", ") subparser = subparsers.add_parser( 'mem-set', description=mem_set.__doc__, ) subparser.set_defaults(func=mem_set) subparser.add_argument( 'vm_hostname',", "help='Do not wait for guest to shutdown gracefully', ) subparser.add_argument(", "help='Retire VM after stopping it', ) subparser = subparsers.add_parser( 'restart',", "better safe # than sorry. for subparsers_action in subparsers_actions: #", "CRITICAL = '\\033[1;41m{}\\033[0m' class IGVMArgumentParser(ArgumentParser): def format_help(self): if not any(isinstance(a,", "24h.' ), ) subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build or migrate", ") subparser.add_argument( '--retire', action='store_true', help='Set VM state to \"retired\" on", "silent and verbose arguments in here. 
It # is not", "Copyright (c) 2017 InnoGames GmbH \"\"\" from __future__ import print_function", "are allowed' ) ) subparser = subparsers.add_parser( 'mem-set', description=mem_set.__doc__, )", "configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) # We are summing up the silent", "out.append(ColorFormatters.BOLD.format(choice)) if subparser.get_default('func').__doc__: out.append('\\n'.join( '\\t{}'.format(l.strip()) for l in subparser .get_default('func').__doc__.strip().splitlines()", "chroot before powering up', ) subparser.add_argument( '--debug-puppet', action='store_true', help='Run puppet", "fails finding a Hypervisor.', ) subparser = subparsers.add_parser( 'migrate', description=vm_migrate.__doc__,", "subparser.set_defaults(func=vm_migrate) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument(", "subparser = subparsers.add_parser( 'define', description=vm_define.__doc__, ) subparser.set_defaults(func=vm_define) subparser.add_argument('vm_hostname', help='Hostname of", "'\\t{}'.format(l.strip()) for l in subparser .get_default('func').__doc__.strip().splitlines() )) out.append('\\n\\t{}'.format(subparser.format_usage())) return '\\n'.join(out)", ") subparser.set_defaults(func=vm_restart) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "( change_address, disk_set, evacuate, host_info, mem_set, vcpu_set, vm_build, vm_delete, vm_migrate,", "Hint: If igvm fails to find ' 'a matching Hypervisor", "which has the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd',", "subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change memory, and restart VM',", "for action in self._actions if isinstance(action, _SubParsersAction) ] # There", "'Specify drbd (default) or netcat transport to migrate disk image'", ") subparser = subparsers.add_parser( 'rename', description=vm_rename.__doc__, ) 
subparser.set_defaults(func=vm_rename) subparser.add_argument( 'vm_hostname',", "close_virtconns() # The underlying library of Fabric, Paramiko, raises an", ") subparser.set_defaults(func=vm_stop) subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', )", "subparser.add_argument( 'vm_hostname', help='Hostname of the guest system', ) subparser.add_argument( 'new_hostname',", "just print what would be done' ) subparser.add_argument( '--offline', nargs='*',", "of destination hypervisor', ) subparser.add_argument( '--run-puppet', action='store_true', help='Run puppet in", "'disk-set', description=disk_set.__doc__, ) subparser.set_defaults(func=disk_set) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "use them both, but giving an error is not #", "action='store_true', help='Rebuild already defined VM or build it if not", "'delete', description=vm_delete.__doc__, ) subparser.set_defaults(func=vm_delete) subparser.add_argument( 'vm_hostname', help='Hostname of the guest", "for 24h.' ), ) subparser.add_argument( '--enforce-vm-env', dest='enforce_vm_env', action='store_true', help='Build or", "while changing IP address', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow", "be specified relative with \"+\". Only integers are allowed' )", "are allowed.' ), ) subparser.add_argument( '--offline', action='store_true', help='Shutdown VM, change", "after the disconnect function is called. We are # sleeping", "size with an optional unit (default GiB). 
' 'Can be", "before powering up', ) subparser.add_argument( '--debug-puppet', action='store_true', help='Run puppet in", "the state online_reserved', ) subparser.add_argument( '--offline-transport', default='drbd', choices=('drbd', 'netcat', 'xfs'),", "'netcat', 'xfs'), help=( 'Specify drbd (default), netcat or xfs transport", "'--no-shutdown', action='store_true', help=( 'Don\\'t shutdown VM during offline migration, igvm", "xfs transport to migrate ' 'disk image' ), ) subparser.add_argument(", "subparser.add_argument( '--rebuild', dest='rebuild', action='store_true', help='Rebuild already defined VM or build", "def format(self, record): level = record.levelname msg = '{}: {}:", "action='store_true', help='Run puppet in chroot before powering up', ) subparser.add_argument(", "help='Hostname of the guest system', ) subparser.add_argument( 'new_address', help=( 'New", "the guest system', ) subparser.add_argument( 'count', type=int, help='New number of", "new HV while changing IP address', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv',", "in self._actions): return super(IGVMArgumentParser, self).format_help() out = [] out.append(ColorFormatters.BOLD.format(__doc__)) out.append('Available", "with --offline --offline-transport=xfs', ) subparser.add_argument( '--soft-preferences', dest='soft_preferences', action='store_true', help='Overrules all", "fails to find ' 'a matching Hypervisor something might be", "only be one subparser_action, but better safe # than sorry.", "if self.isatty and level in vars(ColorFormatters): msg = getattr(ColorFormatters, level).format(msg)", "'--offline', action='store_true', help='Shutdown VM, change CPUs, and restart VM', )", "subparser.add_argument( '--force', action='store_true', help='Do not wait for guest to shutdown", "the guest system', ) subparser = subparsers.add_parser( 'sync', description=vm_sync.__doc__, )", "while to avoid this. 
time.sleep(0.1) def configure_root_logger(silent, verbose): root_logger.addHandler(IGVMLogHandler()) #", "help='Shutdown VM, change memory, and restart VM', ) subparser =", "' 'Can be specified relative with \"+\". Only integers are", "= subparsers.add_parser( 'info', description=host_info.__doc__, ) subparser.set_defaults(func=host_info) subparser.add_argument( 'vm_hostname', help='Hostname of", "# is not really meaningful to use them both, but", "done' ) subparser.add_argument( '--offline', nargs='*', help='Migrate VMs matching the given", ") subparser.add_argument( '--no-shutdown', action='store_true', help=( 'Don\\'t shutdown VM during offline", "migrating to a host which has the state online_reserved', )", "subparsers.add_parser( 'evacuate', description=evacuate.__doc__, ) subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname', help='Hostname of the", "serveradmin function offline', ) subparser.add_argument( '--ignore-reserved', dest='allow_reserved_hv', action='store_true', help='Allow migrating", "'Only integers are allowed.' ), ) subparser.add_argument( '--offline', action='store_true', help='Shutdown", "action='store_true', help='Shutdown VM, if running', ) subparser = subparsers.add_parser( 'evacuate',", "subparser.set_defaults(func=evacuate) subparser.add_argument( 'hv_hostname', help='Hostname of the hypervisor', ) subparser.add_argument( 'dst_hv_hostname',", "an error is not # better. See Python logging library" ]
[ "dataset_path = os.path.join(base_path, dataset_name) processor = PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path)", "18025 assert len(test_dataset) == 4928 assert test_dataset[0].text_a == \" |", "os.path import abspath, join root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset", "= processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path) assert len(train_dataset)", "== \"\" assert test_dataset[0].tgt_text == \"Abilene, Texas is served by", "base_path = os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name = \"webnlg_2017\" dataset_path", "processor = PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path) test_dataset", "<reponame>puraminy/OpenPrompt import os, sys from os.path import dirname as d", "from os.path import dirname as d from os.path import abspath,", "assert test_dataset[0].tgt_text == \"Abilene, Texas is served by the Abilene", "test_dataset[0].tgt_text == \"Abilene, Texas is served by the Abilene regional", "dirname as d from os.path import abspath, join root_dir =", "d from os.path import abspath, join root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir)", "== \" | Abilene_Regional_Airport : cityServed : Abilene,_Texas\" assert test_dataset[0].text_b", "== \"Abilene, Texas is served by the Abilene regional airport.\"", "test_dataset[0].text_b == \"\" assert test_dataset[0].tgt_text == \"Abilene, Texas is served", "openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path = os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name", "abspath, join root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir) from 
openprompt.data_utils.conditional_generation_dataset import PROCESSORS", "os.path.join(base_path, dataset_name) processor = PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path) valid_dataset =", "train_dataset = processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path) assert", "== 18025 assert len(test_dataset) == 4928 assert test_dataset[0].text_a == \"", "root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path =", "processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path) assert len(train_dataset) == 18025 assert len(valid_dataset)", "= processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path) assert len(train_dataset) == 18025 assert", "= os.path.join(base_path, dataset_name) processor = PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path) valid_dataset", "dataset_name) processor = PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path)", "as d from os.path import abspath, join root_dir = d(d(d(abspath(__file__))))", "def test_WebNLGProcessor(): dataset_name = \"webnlg_2017\" dataset_path = os.path.join(base_path, dataset_name) processor", "Abilene_Regional_Airport : cityServed : Abilene,_Texas\" assert test_dataset[0].text_b == \"\" assert", "\"\" assert test_dataset[0].tgt_text == \"Abilene, Texas is served by the", "from os.path import abspath, join root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir) from", "test_dataset[0].text_a == \" | Abilene_Regional_Airport : cityServed : Abilene,_Texas\" assert", "= \"webnlg_2017\" dataset_path = os.path.join(base_path, dataset_name) processor = 
PROCESSORS[dataset_name.lower()]() train_dataset", "test_WebNLGProcessor(): dataset_name = \"webnlg_2017\" dataset_path = os.path.join(base_path, dataset_name) processor =", "= os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name = \"webnlg_2017\" dataset_path =", "assert test_dataset[0].text_a == \" | Abilene_Regional_Airport : cityServed : Abilene,_Texas\"", "\" | Abilene_Regional_Airport : cityServed : Abilene,_Texas\" assert test_dataset[0].text_b ==", "from openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path = os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor():", "= d(d(d(abspath(__file__)))) sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path = os.path.join(root_dir,", "processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path) assert len(train_dataset) ==", "len(test_dataset) == 4928 assert test_dataset[0].text_a == \" | Abilene_Regional_Airport :", "sys from os.path import dirname as d from os.path import", "import os, sys from os.path import dirname as d from", "Abilene,_Texas\" assert test_dataset[0].text_b == \"\" assert test_dataset[0].tgt_text == \"Abilene, Texas", "assert len(test_dataset) == 4928 assert test_dataset[0].text_a == \" | Abilene_Regional_Airport", ": Abilene,_Texas\" assert test_dataset[0].text_b == \"\" assert test_dataset[0].tgt_text == \"Abilene,", "os.path import dirname as d from os.path import abspath, join", "cityServed : Abilene,_Texas\" assert test_dataset[0].text_b == \"\" assert test_dataset[0].tgt_text ==", "4928 assert test_dataset[0].text_a == \" | Abilene_Regional_Airport : cityServed :", "join root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path", "import dirname as d from os.path import 
abspath, join root_dir", "import PROCESSORS base_path = os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name =", "18025 assert len(valid_dataset) == 18025 assert len(test_dataset) == 4928 assert", "test_dataset = processor.get_test_examples(dataset_path) assert len(train_dataset) == 18025 assert len(valid_dataset) ==", "assert len(valid_dataset) == 18025 assert len(test_dataset) == 4928 assert test_dataset[0].text_a", "PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path)", "valid_dataset = processor.get_train_examples(dataset_path) test_dataset = processor.get_test_examples(dataset_path) assert len(train_dataset) == 18025", "\"webnlg_2017\" dataset_path = os.path.join(base_path, dataset_name) processor = PROCESSORS[dataset_name.lower()]() train_dataset =", "import abspath, join root_dir = d(d(d(abspath(__file__)))) sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset import", "os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name = \"webnlg_2017\" dataset_path = os.path.join(base_path,", "assert len(train_dataset) == 18025 assert len(valid_dataset) == 18025 assert len(test_dataset)", "| Abilene_Regional_Airport : cityServed : Abilene,_Texas\" assert test_dataset[0].text_b == \"\"", "= PROCESSORS[dataset_name.lower()]() train_dataset = processor.get_train_examples(dataset_path) valid_dataset = processor.get_train_examples(dataset_path) test_dataset =", "\"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name = \"webnlg_2017\" dataset_path = os.path.join(base_path, dataset_name)", "= processor.get_test_examples(dataset_path) assert len(train_dataset) == 18025 assert len(valid_dataset) == 18025", "sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path = 
os.path.join(root_dir, \"datasets/CondGen\") def", "d(d(d(abspath(__file__)))) sys.path.append(root_dir) from openprompt.data_utils.conditional_generation_dataset import PROCESSORS base_path = os.path.join(root_dir, \"datasets/CondGen\")", "dataset_name = \"webnlg_2017\" dataset_path = os.path.join(base_path, dataset_name) processor = PROCESSORS[dataset_name.lower()]()", "PROCESSORS base_path = os.path.join(root_dir, \"datasets/CondGen\") def test_WebNLGProcessor(): dataset_name = \"webnlg_2017\"", "== 18025 assert len(valid_dataset) == 18025 assert len(test_dataset) == 4928", "os, sys from os.path import dirname as d from os.path", "processor.get_test_examples(dataset_path) assert len(train_dataset) == 18025 assert len(valid_dataset) == 18025 assert", "len(valid_dataset) == 18025 assert len(test_dataset) == 4928 assert test_dataset[0].text_a ==", ": cityServed : Abilene,_Texas\" assert test_dataset[0].text_b == \"\" assert test_dataset[0].tgt_text", "assert test_dataset[0].text_b == \"\" assert test_dataset[0].tgt_text == \"Abilene, Texas is", "== 4928 assert test_dataset[0].text_a == \" | Abilene_Regional_Airport : cityServed", "len(train_dataset) == 18025 assert len(valid_dataset) == 18025 assert len(test_dataset) ==" ]
[ "syntax/format \", RESOURCE_NOT_AVAILABLE : \"Not available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\",", "0x1004 OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID = 0x2000", "0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW = 0x4003", "set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\", ATTRIBUTE_ACCESS_DENIED : \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR", "0x5FFF IO_NOT_READY = 0x6000 IO_BUSY = 0x6001 IO_TIMEOUT = 0x6002", "ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR", "= 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR = 0xFFFE UNKNOWN_ERROR =", "error in IO operation\", UNKNOWN_ERROR : \"Unknown error\", } ##", "FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE = 4 FILE_CREATE_FAILURE", "= 0xFFFE UNKNOWN_ERROR = 0xFFFF ## Error message of each", "when processing options\", PARAMETER_INVALID : \"Invalid parameter\", PARAMETER_MISSING : \"Missing", "option\", OPTION_NOT_SUPPORTED : \"Unsupported option\", OPTION_UNKNOWN_ERROR : \"Unknown error when", "IS DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS IS\"", "RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE", ": \"Missing parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown error in parameters\", FORMAT_INVALID", "OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR", "@file # Standardized Error Hanlding infrastructures. 
# # Copyright (c)", "0xF009 WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011", "reserved.<BR> # This program and the accompanying materials # are", "processing options\", PARAMETER_INVALID : \"Invalid parameter\", PARAMETER_MISSING : \"Missing parameter\",", "retrieve\", ATTRIBUTE_SET_FAILURE : \"Failed to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\",", "ABORT_ERROR = 0xFFFE UNKNOWN_ERROR = 0xFFFF ## Error message of", "9 FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE = 12", "0xF005 DDC_ERROR = 0xF009 WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR = 0xF010", "= 0xF009 WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR =", ": \"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE : \"Not available\",", "FILE_TYPE_MISMATCH : \"Incorrect file type\", FILE_CASE_MISMATCH : \"File name case", "http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD", "indicating a fatal error class FatalError(Exception): pass if __name__ ==", "error in syntax/format \", RESOURCE_NOT_AVAILABLE : \"Not available\", RESOURCE_ALLOCATE_FAILURE :", "RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN", "FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE = 4 FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE", "# are licensed and made available under the terms and", "\"File/directory not found in workspace\", FILE_OPEN_FAILURE : \"File open failure\",", "THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN", "mismatch\", FILE_DUPLICATED : \"Duplicated file found\", FILE_UNKNOWN_ERROR : \"Unknown error", "FORMAT_UNKNOWN : \"Unknown format\", FORMAT_UNKNOWN_ERROR : \"Unknown error in syntax/format", "License # which accompanies this distribution. 
The full text of", "open failure\", FILE_WRITE_FAILURE : \"File write failure\", FILE_PARSE_FAILURE : \"File", "= 0x1FFF PARAMETER_INVALID = 0x2000 PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF", "under the terms and conditions of the BSD License #", ": \"File copy failure\", FILE_POSITIONING_FAILURE: \"Failed to seeking position\", FILE_ALREADY_EXIST", "Exception indicating a fatal error class FatalError(Exception): pass if __name__", "FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE", "FILE_COPY_FAILURE : \"File copy failure\", FILE_POSITIONING_FAILURE: \"Failed to seeking position\",", "= 0x6000 IO_BUSY = 0x6001 IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR =", ": \"Busy\", IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR : \"Unknown error in", "EITHER EXPRESS OR IMPLIED. # FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE =", "\"Unknown format\", FORMAT_UNKNOWN_ERROR : \"Unknown error in syntax/format \", RESOURCE_NOT_AVAILABLE", "BSD LICENSE ON AN \"AS IS\" BASIS, # WITHOUT WARRANTIES", "\"File write failure\", FILE_PARSE_FAILURE : \"File parse failure\", FILE_READ_FAILURE :", "IO_BUSY = 0x6001 IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE", "made available under the terms and conditions of the BSD", "FILE_PARSE_FAILURE : \"File parse failure\", FILE_READ_FAILURE : \"File read failure\",", "options\", PARAMETER_INVALID : \"Invalid parameter\", PARAMETER_MISSING : \"Missing parameter\", PARAMETER_UNKNOWN_ERROR", ": \"Invalid parameter\", PARAMETER_MISSING : \"Missing parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown", "syntax/format\", FORMAT_NOT_SUPPORTED : \"Not supported syntax/format\", FORMAT_UNKNOWN : \"Unknown format\",", "This program and the accompanying materials # are licensed and", "license may be found at # http://opensource.org/licenses/bsd-license.php # # THE", ": \"File decompress failure\", FILE_MOVE_FAILURE : \"File move failure\", FILE_DELETE_FAILURE", "3 FILE_READ_FAILURE = 4 
FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE = 6", "0x6001 IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE = 0x7000", "failure\", FILE_POSITIONING_FAILURE: \"Failed to seeking position\", FILE_ALREADY_EXIST : \"File or", "file\", OPTION_UNKNOWN : \"Unknown option\", OPTION_MISSING : \"Missing option\", OPTION_CONFLICT", "6 FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE = 9", "17 FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN = 0x1000 OPTION_MISSING = 0x1001", "OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID = 0x2000 PARAMETER_MISSING", "= 14 FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH = 16 FILE_DUPLICATED =", "Error message of each error code gErrorMessage = { FILE_NOT_FOUND", "= 0xF005 DDC_ERROR = 0xF009 WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR =", "this distribution. The full text of the license may be", "= 0x4002 RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR =", "= 0x5000 ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE =", ": \"Invalid checksum of file\", FILE_COMPRESS_FAILURE : \"File compress failure\",", "FILE_POSITIONING_FAILURE: \"Failed to seeking position\", FILE_ALREADY_EXIST : \"File or directory", "= 6 FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE =", "# Standardized Error Hanlding infrastructures. # # Copyright (c) 2007", "exists\", FILE_TYPE_MISMATCH : \"Incorrect file type\", FILE_CASE_MISMATCH : \"File name", "0x8000 CODE_ERROR = 0xC0DE AUTOGEN_ERROR = 0xF000 PARSER_ERROR = 0xF001", "accompanies this distribution. The full text of the license may", "FORMAT_INVALID : \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED : \"Not supported syntax/format\", FORMAT_UNKNOWN", "= 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR =", "All rights reserved.<BR> # This program and the accompanying materials", "2007 - 2015, Intel Corporation. 
All rights reserved.<BR> # This", ": \"Timeout\", IO_UNKNOWN_ERROR : \"Unknown error in IO operation\", UNKNOWN_ERROR", "syntax/format\", FORMAT_UNKNOWN : \"Unknown format\", FORMAT_UNKNOWN_ERROR : \"Unknown error in", "checksum of file\", FILE_COMPRESS_FAILURE : \"File compress failure\", FILE_DECOMPRESS_FAILURE :", "= 0x6FFF COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE = 0x8000 CODE_ERROR =", "RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE", "value of option\", OPTION_DEPRECATED : \"Deprecated option\", OPTION_NOT_SUPPORTED : \"Unsupported", "0xFFFE UNKNOWN_ERROR = 0xFFFF ## Error message of each error", ": \"File/directory not found in workspace\", FILE_OPEN_FAILURE : \"File open", "on file\", OPTION_UNKNOWN : \"Unknown option\", OPTION_MISSING : \"Missing option\",", "0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID = 0x2000 PARAMETER_MISSING = 0x2001", "0x4002 RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF", "\"File parse failure\", FILE_READ_FAILURE : \"File read failure\", FILE_CREATE_FAILURE :", "FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE", "of the license may be found at # http://opensource.org/licenses/bsd-license.php #", "error encountered on file\", OPTION_UNKNOWN : \"Unknown option\", OPTION_MISSING :", "Intel Corporation. 
All rights reserved.<BR> # This program and the", "} ## Exception indicating a fatal error class FatalError(Exception): pass", "FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST", ": \"File or directory already exists\", FILE_TYPE_MISMATCH : \"Incorrect file", "\"Not available\", ATTRIBUTE_GET_FAILURE : \"Failed to retrieve\", ATTRIBUTE_SET_FAILURE : \"Failed", "\"Unknown error in syntax/format \", RESOURCE_NOT_AVAILABLE : \"Not available\", RESOURCE_ALLOCATE_FAILURE", "= 0x3001 FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE =", "= 15 FILE_CASE_MISMATCH = 16 FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR =", "0x3000 FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF", "0x0FFF OPTION_UNKNOWN = 0x1000 OPTION_MISSING = 0x1001 OPTION_CONFLICT = 0x1002", "THE BSD LICENSE ON AN \"AS IS\" BASIS, # WITHOUT", "= 9 FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE =", "OPTION_DEPRECATED : \"Deprecated option\", OPTION_NOT_SUPPORTED : \"Unsupported option\", OPTION_UNKNOWN_ERROR :", "\"Failed to seeking position\", FILE_ALREADY_EXIST : \"File or directory already", "conditions of the BSD License # which accompanies this distribution.", "FILE_CHECKSUM_FAILURE : \"Invalid checksum of file\", FILE_COMPRESS_FAILURE : \"File compress", "PARAMETER_MISSING : \"Missing parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown error in parameters\",", "0x4003 RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000", "\"Invalid syntax/format\", FORMAT_NOT_SUPPORTED : \"Not supported syntax/format\", FORMAT_UNKNOWN : \"Unknown", "ECC_ERROR = 0xF004 EOT_ERROR = 0xF005 DDC_ERROR = 0xF009 WARNING_AS_ERROR", "OF ANY KIND, EITHER EXPRESS OR IMPLIED. 
# FILE_OPEN_FAILURE =", "\"File decompress failure\", FILE_MOVE_FAILURE : \"File move failure\", FILE_DELETE_FAILURE :", "= 13 FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH =", "0xF003 ECC_ERROR = 0xF004 EOT_ERROR = 0xF005 DDC_ERROR = 0xF009", "the BSD License # which accompanies this distribution. The full", "option\", OPTION_CONFLICT : \"Conflict options\", OPTION_VALUE_INVALID : \"Invalid value of", ": \"Incorrect file type\", FILE_CASE_MISMATCH : \"File name case mismatch\",", "IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR : \"Unknown error in IO operation\",", ": \"Unknown error\", } ## Exception indicating a fatal error", "0x1002 OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED = 0x1005", "text of the license may be found at # http://opensource.org/licenses/bsd-license.php", "ATTRIBUTE_SET_FAILURE : \"Failed to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\", ATTRIBUTE_ACCESS_DENIED", "= 3 FILE_READ_FAILURE = 4 FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE =", "= 5 FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE =", "# which accompanies this distribution. The full text of the", "FILE_UNKNOWN_ERROR : \"Unknown error encountered on file\", OPTION_UNKNOWN : \"Unknown", "RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\", RESOURCE_FULL : \"Full\", RESOURCE_OVERFLOW : \"Overflow\",", "materials # are licensed and made available under the terms", "\"Failed to retrieve\", ATTRIBUTE_SET_FAILURE : \"Failed to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed", "infrastructures. 
# # Copyright (c) 2007 - 2015, Intel Corporation.", "to seeking position\", FILE_ALREADY_EXIST : \"File or directory already exists\",", "available under the terms and conditions of the BSD License", "= 2 FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE = 4 FILE_CREATE_FAILURE =", ": \"Unknown format\", FORMAT_UNKNOWN_ERROR : \"Unknown error in syntax/format \",", "0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE = 0x5002", "AN \"AS IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF", "= 0xF003 ECC_ERROR = 0xF004 EOT_ERROR = 0xF005 DDC_ERROR =", "# This program and the accompanying materials # are licensed", "= 11 FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND =", ": \"Overflow\", RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE", "accessing\", COMMAND_FAILURE : \"Failed to execute command\", IO_NOT_READY : \"Not", "BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER", "13 FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH = 16", "name case mismatch\", FILE_DUPLICATED : \"Duplicated file found\", FILE_UNKNOWN_ERROR :", "PERMISSION_FAILURE = 0x8000 CODE_ERROR = 0xC0DE AUTOGEN_ERROR = 0xF000 PARSER_ERROR", "DDC_ERROR = 0xF009 WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR", "\"Unknown option\", OPTION_MISSING : \"Missing option\", OPTION_CONFLICT : \"Conflict options\",", "of file\", FILE_COMPRESS_FAILURE : \"File compress failure\", FILE_DECOMPRESS_FAILURE : \"File", "FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE", "= 0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL =", "ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED", "OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED", 
"= 0x1003 OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR =", "\"Invalid value of option\", OPTION_DEPRECATED : \"Deprecated option\", OPTION_NOT_SUPPORTED :", "\"Not ready\", IO_BUSY : \"Busy\", IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR :", ": \"Invalid value of option\", OPTION_DEPRECATED : \"Deprecated option\", OPTION_NOT_SUPPORTED", ": \"Unknown error in syntax/format \", RESOURCE_NOT_AVAILABLE : \"Not available\",", "0x1003 OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF", "\"Timeout\", IO_UNKNOWN_ERROR : \"Unknown error in IO operation\", UNKNOWN_ERROR :", "= 10 FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST =", "0x3001 FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000", "or directory already exists\", FILE_TYPE_MISMATCH : \"Incorrect file type\", FILE_CASE_MISMATCH", "FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN = 0x1000 OPTION_MISSING = 0x1001 OPTION_CONFLICT", "\"Unknown error\", ATTRIBUTE_NOT_AVAILABLE : \"Not available\", ATTRIBUTE_GET_FAILURE : \"Failed to", "at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER", "the accompanying materials # are licensed and made available under", "decompress failure\", FILE_MOVE_FAILURE : \"File move failure\", FILE_DELETE_FAILURE : \"File", "IO_UNKNOWN_ERROR : \"Unknown error in IO operation\", UNKNOWN_ERROR : \"Unknown", "OPTION_UNKNOWN : \"Unknown option\", OPTION_MISSING : \"Missing option\", OPTION_CONFLICT :", "4 FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE = 7", "\"File copy failure\", FILE_POSITIONING_FAILURE: \"Failed to seeking position\", FILE_ALREADY_EXIST :", "OPTION_MISSING = 0x1001 OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED", "\"Failed to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\", ATTRIBUTE_ACCESS_DENIED : \"Access", ": \"Allocate failure\", RESOURCE_FULL : \"Full\", 
RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN", "= 0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE =", "FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH", "error\", } ## Exception indicating a fatal error class FatalError(Exception):", "failure\", FILE_COPY_FAILURE : \"File copy failure\", FILE_POSITIONING_FAILURE: \"Failed to seeking", "compress failure\", FILE_DECOMPRESS_FAILURE : \"File decompress failure\", FILE_MOVE_FAILURE : \"File", ": \"Conflict options\", OPTION_VALUE_INVALID : \"Invalid value of option\", OPTION_DEPRECATED", ": \"Not ready\", IO_BUSY : \"Busy\", IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR", "PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS", "\"Overflow\", RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE :", "# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS", "= 0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR =", "OPTION_MISSING : \"Missing option\", OPTION_CONFLICT : \"Conflict options\", OPTION_VALUE_INVALID :", "the license may be found at # http://opensource.org/licenses/bsd-license.php # #", "parameters\", FORMAT_INVALID : \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED : \"Not supported syntax/format\",", "## @file # Standardized Error Hanlding infrastructures. 
# # Copyright", "# # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE", "= 7 FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE =", "PARAMETER_INVALID = 0x2000 PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID =", "The full text of the license may be found at", "{ FILE_NOT_FOUND : \"File/directory not found in workspace\", FILE_OPEN_FAILURE :", "14 FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH = 16 FILE_DUPLICATED = 17", "= 0x1000 OPTION_MISSING = 0x1001 OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID =", ": \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED : \"Not supported syntax/format\", FORMAT_UNKNOWN :", ": \"File read failure\", FILE_CREATE_FAILURE : \"File create failure\", FILE_CHECKSUM_FAILURE", "FILE_READ_FAILURE : \"File read failure\", FILE_CREATE_FAILURE : \"File create failure\",", "= 0xF001 BUILD_ERROR = 0xF002 GENFDS_ERROR = 0xF003 ECC_ERROR =", "ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\", ATTRIBUTE_ACCESS_DENIED : \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR :", "OPTION_UNKNOWN = 0x1000 OPTION_MISSING = 0x1001 OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID", "= 0x6001 IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE =", "0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN", "copy failure\", FILE_POSITIONING_FAILURE: \"Failed to seeking position\", FILE_ALREADY_EXIST : \"File", "FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE", "RESOURCE_NOT_AVAILABLE : \"Not available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\", RESOURCE_FULL :", "FILE_CREATE_FAILURE : \"File create failure\", FILE_CHECKSUM_FAILURE : \"Invalid checksum of", "RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR", "OPTION_VALUE_INVALID : \"Invalid value of option\", OPTION_DEPRECATED : \"Deprecated option\",", "= 0xF002 GENFDS_ERROR = 0xF003 ECC_ERROR = 0xF004 EOT_ERROR =", 
"OPTION_NOT_SUPPORTED : \"Unsupported option\", OPTION_UNKNOWN_ERROR : \"Unknown error when processing", "failure\", FILE_MOVE_FAILURE : \"File move failure\", FILE_DELETE_FAILURE : \"File delete", "11 FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND = 14", "of option\", OPTION_DEPRECATED : \"Deprecated option\", OPTION_NOT_SUPPORTED : \"Unsupported option\",", "\"File read failure\", FILE_CREATE_FAILURE : \"File create failure\", FILE_CHECKSUM_FAILURE :", "supported syntax/format\", FORMAT_UNKNOWN : \"Unknown format\", FORMAT_UNKNOWN_ERROR : \"Unknown error", "FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL", "\"Unsupported option\", OPTION_UNKNOWN_ERROR : \"Unknown error when processing options\", PARAMETER_INVALID", "in parameters\", FORMAT_INVALID : \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED : \"Not supported", "failure\", FILE_DECOMPRESS_FAILURE : \"File decompress failure\", FILE_MOVE_FAILURE : \"File move", "when accessing\", COMMAND_FAILURE : \"Failed to execute command\", IO_NOT_READY :", "IO_NOT_READY : \"Not ready\", IO_BUSY : \"Busy\", IO_TIMEOUT : \"Timeout\",", "PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR", "WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR", "0x6000 IO_BUSY = 0x6001 IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR = 0x6FFF", "FILE_ALREADY_EXIST : \"File or directory already exists\", FILE_TYPE_MISMATCH : \"Incorrect", ": \"Not available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\", RESOURCE_FULL : \"Full\",", "0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001", ": \"Not supported syntax/format\", FORMAT_UNKNOWN : \"Unknown format\", FORMAT_UNKNOWN_ERROR :", "DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS IS\" BASIS,", "CODE_ERROR = 0xC0DE 
AUTOGEN_ERROR = 0xF000 PARSER_ERROR = 0xF001 BUILD_ERROR", "= 0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID = 0x2000 PARAMETER_MISSING =", "7 FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE = 10", "15 FILE_CASE_MISMATCH = 16 FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR = 0x0FFF", "OPTION_CONFLICT : \"Conflict options\", OPTION_VALUE_INVALID : \"Invalid value of option\",", "option\", OPTION_DEPRECATED : \"Deprecated option\", OPTION_NOT_SUPPORTED : \"Unsupported option\", OPTION_UNKNOWN_ERROR", "FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH = 16 FILE_DUPLICATED", "# Copyright (c) 2007 - 2015, Intel Corporation. All rights", "already exists\", FILE_TYPE_MISMATCH : \"Incorrect file type\", FILE_CASE_MISMATCH : \"File", "PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR = 0xFFFE UNKNOWN_ERROR", ": \"File parse failure\", FILE_READ_FAILURE : \"File read failure\", FILE_CREATE_FAILURE", "may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM", "\", RESOURCE_NOT_AVAILABLE : \"Not available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\", RESOURCE_FULL", "FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE", "KIND, EITHER EXPRESS OR IMPLIED. # FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE", "= 0x4003 RESOURCE_UNDERRUN = 0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE =", ": \"Failed to retrieve\", ATTRIBUTE_SET_FAILURE : \"Failed to set\", ATTRIBUTE_UPDATE_FAILURE:", "EOT_ERROR = 0xF005 DDC_ERROR = 0xF009 WARNING_AS_ERROR = 0xF006 MIGRATION_ERROR", "OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. 
#", "option\", OPTION_UNKNOWN_ERROR : \"Unknown error when processing options\", PARAMETER_INVALID :", "gErrorMessage = { FILE_NOT_FOUND : \"File/directory not found in workspace\",", "write failure\", FILE_PARSE_FAILURE : \"File parse failure\", FILE_READ_FAILURE : \"File", "FORMAT_UNKNOWN_ERROR : \"Unknown error in syntax/format \", RESOURCE_NOT_AVAILABLE : \"Not", ": \"Failed to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\", ATTRIBUTE_ACCESS_DENIED :", "FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH", "0x5000 ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003", "in syntax/format \", RESOURCE_NOT_AVAILABLE : \"Not available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate", "type\", FILE_CASE_MISMATCH : \"File name case mismatch\", FILE_DUPLICATED : \"Duplicated", "BUILD_ERROR = 0xF002 GENFDS_ERROR = 0xF003 ECC_ERROR = 0xF004 EOT_ERROR", "which accompanies this distribution. The full text of the license", "\"Duplicated file found\", FILE_UNKNOWN_ERROR : \"Unknown error encountered on file\",", "be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS", "error when processing options\", PARAMETER_INVALID : \"Invalid parameter\", PARAMETER_MISSING :", "= 0xC0DE AUTOGEN_ERROR = 0xF000 PARSER_ERROR = 0xF001 BUILD_ERROR =", "GENFDS_ERROR = 0xF003 ECC_ERROR = 0xF004 EOT_ERROR = 0xF005 DDC_ERROR", "FILE_DELETE_FAILURE : \"File delete failure\", FILE_COPY_FAILURE : \"File copy failure\",", "\"Unknown error when accessing\", COMMAND_FAILURE : \"Failed to execute command\",", "Corporation. 
All rights reserved.<BR> # This program and the accompanying", "FILE_TYPE_MISMATCH = 15 FILE_CASE_MISMATCH = 16 FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR", "0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY = 0x6000", "OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID", "\"File compress failure\", FILE_DECOMPRESS_FAILURE : \"File decompress failure\", FILE_MOVE_FAILURE :", "failure\", RESOURCE_FULL : \"Full\", RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN : \"Underrun\",", "found\", FILE_UNKNOWN_ERROR : \"Unknown error encountered on file\", OPTION_UNKNOWN :", "FILE_DECOMPRESS_FAILURE : \"File decompress failure\", FILE_MOVE_FAILURE : \"File move failure\",", "format\", FORMAT_UNKNOWN_ERROR : \"Unknown error in syntax/format \", RESOURCE_NOT_AVAILABLE :", "found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED", "= 0x3000 FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR =", "encountered on file\", OPTION_UNKNOWN : \"Unknown option\", OPTION_MISSING : \"Missing", "UNDER THE BSD LICENSE ON AN \"AS IS\" BASIS, #", "IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE = 0x8000 CODE_ERROR", "\"Failed to update\", ATTRIBUTE_ACCESS_DENIED : \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown", "= 0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY =", "OPTION_UNKNOWN_ERROR : \"Unknown error when processing options\", PARAMETER_INVALID : \"Invalid", "5 FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE = 7 FILE_DECOMPRESS_FAILURE = 8", "file found\", FILE_UNKNOWN_ERROR : \"Unknown error encountered on file\", OPTION_UNKNOWN", "IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE", "\"Conflict options\", OPTION_VALUE_INVALID : \"Invalid value of option\", OPTION_DEPRECATED :", ": \"Unsupported option\", OPTION_UNKNOWN_ERROR : \"Unknown 
error when processing options\",", "= 0x1001 OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED =", "# FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE = 3", "\"Missing parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown error in parameters\", FORMAT_INVALID :", "ANY KIND, EITHER EXPRESS OR IMPLIED. # FILE_OPEN_FAILURE = 1", "FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR", "denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error when accessing\", COMMAND_FAILURE : \"Failed", ": \"Unknown error in IO operation\", UNKNOWN_ERROR : \"Unknown error\",", "\"Unknown error in IO operation\", UNKNOWN_ERROR : \"Unknown error\", }", "ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY = 0x6000 IO_BUSY", ": \"Unknown error when accessing\", COMMAND_FAILURE : \"Failed to execute", "0xF006 MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012", "message of each error code gErrorMessage = { FILE_NOT_FOUND :", "failure\", FILE_DELETE_FAILURE : \"File delete failure\", FILE_COPY_FAILURE : \"File copy", "failure\", FILE_CHECKSUM_FAILURE : \"Invalid checksum of file\", FILE_COMPRESS_FAILURE : \"File", "RESOURCE_FULL : \"Full\", RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR", ": \"File write failure\", FILE_PARSE_FAILURE : \"File parse failure\", FILE_READ_FAILURE", "0x7000 PERMISSION_FAILURE = 0x8000 CODE_ERROR = 0xC0DE AUTOGEN_ERROR = 0xF000", "\"File move failure\", FILE_DELETE_FAILURE : \"File delete failure\", FILE_COPY_FAILURE :", "of each error code gErrorMessage = { FILE_NOT_FOUND : \"File/directory", "PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED =", "# http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE", "failure\", FILE_PARSE_FAILURE : \"File parse failure\", FILE_READ_FAILURE : 
\"File read", "file type\", FILE_CASE_MISMATCH : \"File name case mismatch\", FILE_DUPLICATED :", "0x6FFF COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE = 0x8000 CODE_ERROR = 0xC0DE", ": \"Failed to execute command\", IO_NOT_READY : \"Not ready\", IO_BUSY", "RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE : \"Not", "= 4 FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE =", "COMMAND_FAILURE : \"Failed to execute command\", IO_NOT_READY : \"Not ready\",", "UNKNOWN_ERROR : \"Unknown error\", } ## Exception indicating a fatal", "= 0xF004 EOT_ERROR = 0xF005 DDC_ERROR = 0xF009 WARNING_AS_ERROR =", "= 17 FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN = 0x1000 OPTION_MISSING =", "= 0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE =", "= 16 FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN =", "(c) 2007 - 2015, Intel Corporation. All rights reserved.<BR> #", ": \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE : \"Not available\", ATTRIBUTE_GET_FAILURE : \"Failed", "code gErrorMessage = { FILE_NOT_FOUND : \"File/directory not found in", "RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE", "distribution. 
The full text of the license may be found", "= 0x1004 OPTION_NOT_SUPPORTED = 0x1005 OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID =", "error code gErrorMessage = { FILE_NOT_FOUND : \"File/directory not found", "case mismatch\", FILE_DUPLICATED : \"Duplicated file found\", FILE_UNKNOWN_ERROR : \"Unknown", "\"File open failure\", FILE_WRITE_FAILURE : \"File write failure\", FILE_PARSE_FAILURE :", "= 0x7000 PERMISSION_FAILURE = 0x8000 CODE_ERROR = 0xC0DE AUTOGEN_ERROR =", "\"Invalid checksum of file\", FILE_COMPRESS_FAILURE : \"File compress failure\", FILE_DECOMPRESS_FAILURE", "delete failure\", FILE_COPY_FAILURE : \"File copy failure\", FILE_POSITIONING_FAILURE: \"Failed to", "0xF000 PARSER_ERROR = 0xF001 BUILD_ERROR = 0xF002 GENFDS_ERROR = 0xF003", "= 0x4001 RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN =", "\"Unknown error encountered on file\", OPTION_UNKNOWN : \"Unknown option\", OPTION_MISSING", "FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN = 0x3002 FORMAT_UNKNOWN_ERROR = 0x3FFF RESOURCE_NOT_AVAILABLE", "0x4004 RESOURCE_UNKNOWN_ERROR = 0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE = 0x5001", "ready\", IO_BUSY : \"Busy\", IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR : \"Unknown", "0x5001 ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004", ": \"Unknown option\", OPTION_MISSING : \"Missing option\", OPTION_CONFLICT : \"Conflict", "1 FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE = 4", "0x2000 PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED", "IMPLIED. # FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE =", "operation\", UNKNOWN_ERROR : \"Unknown error\", } ## Exception indicating a", "\"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE : \"Not available\", ATTRIBUTE_GET_FAILURE", "OR IMPLIED. 
# FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE", ": \"File name case mismatch\", FILE_DUPLICATED : \"Duplicated file found\",", "0x4001 RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW = 0x4003 RESOURCE_UNDERRUN = 0x4004", "FILE_COMPRESS_FAILURE : \"File compress failure\", FILE_DECOMPRESS_FAILURE : \"File decompress failure\",", "- 2015, Intel Corporation. All rights reserved.<BR> # This program", "0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR = 0xFFFE", "8 FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE = 11", "AUTOGEN_ERROR = 0xF000 PARSER_ERROR = 0xF001 BUILD_ERROR = 0xF002 GENFDS_ERROR", "IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND,", "2015, Intel Corporation. All rights reserved.<BR> # This program and", "0x1FFF PARAMETER_INVALID = 0x2000 PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID", "in workspace\", FILE_OPEN_FAILURE : \"File open failure\", FILE_WRITE_FAILURE : \"File", "= 12 FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH =", "PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR = 0xFFFE UNKNOWN_ERROR = 0xFFFF ##", ": \"File delete failure\", FILE_COPY_FAILURE : \"File copy failure\", FILE_POSITIONING_FAILURE:", "terms and conditions of the BSD License # which accompanies", "workspace\", FILE_OPEN_FAILURE : \"File open failure\", FILE_WRITE_FAILURE : \"File write", "WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.", "= 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED = 0x3001", "RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW", "FILE_READ_FAILURE = 4 FILE_CREATE_FAILURE = 5 FILE_CHECKSUM_FAILURE = 6 FILE_COMPRESS_FAILURE", "file\", FILE_COMPRESS_FAILURE : \"File compress failure\", FILE_DECOMPRESS_FAILURE : \"File decompress", "Copyright (c) 2007 - 2015, Intel Corporation. 
All rights reserved.<BR>", ": \"File open failure\", FILE_WRITE_FAILURE : \"File write failure\", FILE_PARSE_FAILURE", "Error Hanlding infrastructures. # # Copyright (c) 2007 - 2015,", "\"Access denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error when accessing\", COMMAND_FAILURE :", "= 0x1002 OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED = 0x1004 OPTION_NOT_SUPPORTED =", "Hanlding infrastructures. # # Copyright (c) 2007 - 2015, Intel", "EXPRESS OR IMPLIED. # FILE_OPEN_FAILURE = 1 FILE_WRITE_FAILURE = 2", "are licensed and made available under the terms and conditions", "MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR", "## Exception indicating a fatal error class FatalError(Exception): pass if", ": \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error when accessing\", COMMAND_FAILURE", "= 0xF013 ABORT_ERROR = 0xFFFE UNKNOWN_ERROR = 0xFFFF ## Error", "move failure\", FILE_DELETE_FAILURE : \"File delete failure\", FILE_COPY_FAILURE : \"File", "full text of the license may be found at #", "\"File delete failure\", FILE_COPY_FAILURE : \"File copy failure\", FILE_POSITIONING_FAILURE: \"Failed", "FILE_CASE_MISMATCH = 16 FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN", "COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE = 0x8000 CODE_ERROR = 0xC0DE AUTOGEN_ERROR", "0xF001 BUILD_ERROR = 0xF002 GENFDS_ERROR = 0xF003 ECC_ERROR = 0xF004", "= 0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY = 0x6000 IO_BUSY =", "0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF", "FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE", "\"Busy\", IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR : \"Unknown error in IO", "0xF002 GENFDS_ERROR = 0xF003 ECC_ERROR = 0xF004 EOT_ERROR = 0xF005", "error in parameters\", FORMAT_INVALID : \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED : \"Not", 
"available\", ATTRIBUTE_GET_FAILURE : \"Failed to retrieve\", ATTRIBUTE_SET_FAILURE : \"Failed to", "update\", ATTRIBUTE_ACCESS_DENIED : \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error when", "0x1001 OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID = 0x1003 OPTION_DEPRECATED = 0x1004", "Standardized Error Hanlding infrastructures. # # Copyright (c) 2007 -", "= 0x5001 ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED =", "ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY", "PARAMETER_UNKNOWN_ERROR : \"Unknown error in parameters\", FORMAT_INVALID : \"Invalid syntax/format\",", "ATTRIBUTE_NOT_AVAILABLE : \"Not available\", ATTRIBUTE_GET_FAILURE : \"Failed to retrieve\", ATTRIBUTE_SET_FAILURE", "= 0xFFFF ## Error message of each error code gErrorMessage", "= 0x0FFF OPTION_UNKNOWN = 0x1000 OPTION_MISSING = 0x1001 OPTION_CONFLICT =", "error\", ATTRIBUTE_NOT_AVAILABLE : \"Not available\", ATTRIBUTE_GET_FAILURE : \"Failed to retrieve\",", "= 0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL = 0x4002 RESOURCE_OVERFLOW =", "not found in workspace\", FILE_OPEN_FAILURE : \"File open failure\", FILE_WRITE_FAILURE", "RESOURCE_UNKNOWN_ERROR : \"Unknown error\", ATTRIBUTE_NOT_AVAILABLE : \"Not available\", ATTRIBUTE_GET_FAILURE :", "# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON", ": \"Deprecated option\", OPTION_NOT_SUPPORTED : \"Unsupported option\", OPTION_UNKNOWN_ERROR : \"Unknown", "failure\", FILE_CREATE_FAILURE : \"File create failure\", FILE_CHECKSUM_FAILURE : \"Invalid checksum", "0xFFFF ## Error message of each error code gErrorMessage =", "UNKNOWN_ERROR = 0xFFFF ## Error message of each error code", "failure\", FILE_READ_FAILURE : \"File read failure\", FILE_CREATE_FAILURE : \"File create", "seeking position\", FILE_ALREADY_EXIST : \"File or directory already exists\", FILE_TYPE_MISMATCH", "licensed and made available under the terms and 
conditions of", "FILE_OPEN_FAILURE : \"File open failure\", FILE_WRITE_FAILURE : \"File write failure\",", ": \"Missing option\", OPTION_CONFLICT : \"Conflict options\", OPTION_VALUE_INVALID : \"Invalid", ": \"File move failure\", FILE_DELETE_FAILURE : \"File delete failure\", FILE_COPY_FAILURE", "0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR = 0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013", "FORMAT_NOT_SUPPORTED : \"Not supported syntax/format\", FORMAT_UNKNOWN : \"Unknown format\", FORMAT_UNKNOWN_ERROR", "each error code gErrorMessage = { FILE_NOT_FOUND : \"File/directory not", "ATTRIBUTE_ACCESS_DENIED : \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error when accessing\",", "= 0x4FFF ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE =", "failure\", FILE_WRITE_FAILURE : \"File write failure\", FILE_PARSE_FAILURE : \"File parse", ": \"Unknown error encountered on file\", OPTION_UNKNOWN : \"Unknown option\",", ": \"Not available\", ATTRIBUTE_GET_FAILURE : \"Failed to retrieve\", ATTRIBUTE_SET_FAILURE :", "\"Unknown error when processing options\", PARAMETER_INVALID : \"Invalid parameter\", PARAMETER_MISSING", "IO operation\", UNKNOWN_ERROR : \"Unknown error\", } ## Exception indicating", "options\", OPTION_VALUE_INVALID : \"Invalid value of option\", OPTION_DEPRECATED : \"Deprecated", "and made available under the terms and conditions of the", "and the accompanying materials # are licensed and made available", "found in workspace\", FILE_OPEN_FAILURE : \"File open failure\", FILE_WRITE_FAILURE :", "16 FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN = 0x1000", "\"Not supported syntax/format\", FORMAT_UNKNOWN : \"Unknown format\", FORMAT_UNKNOWN_ERROR : \"Unknown", "0xF013 ABORT_ERROR = 0xFFFE UNKNOWN_ERROR = 0xFFFF ## Error message", "execute command\", IO_NOT_READY : \"Not ready\", IO_BUSY : \"Busy\", IO_TIMEOUT", "ATTRIBUTE_GET_FAILURE : \"Failed to retrieve\", 
ATTRIBUTE_SET_FAILURE : \"Failed to set\",", "parse failure\", FILE_READ_FAILURE : \"File read failure\", FILE_CREATE_FAILURE : \"File", "read failure\", FILE_CREATE_FAILURE : \"File create failure\", FILE_CHECKSUM_FAILURE : \"Invalid", "to update\", ATTRIBUTE_ACCESS_DENIED : \"Access denied\", ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error", "= 0x2000 PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID = 0x3000", "FILE_MOVE_FAILURE : \"File move failure\", FILE_DELETE_FAILURE : \"File delete failure\",", "\"Unknown error in parameters\", FORMAT_INVALID : \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED :", "program and the accompanying materials # are licensed and made", "ATTRIBUTE_NOT_AVAILABLE = 0x5000 ATTRIBUTE_GET_FAILURE = 0x5001 ATTRIBUTE_SET_FAILURE = 0x5002 ATTRIBUTE_UPDATE_FAILURE", "= 0x6002 IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE =", "directory already exists\", FILE_TYPE_MISMATCH : \"Incorrect file type\", FILE_CASE_MISMATCH :", "position\", FILE_ALREADY_EXIST : \"File or directory already exists\", FILE_TYPE_MISMATCH :", "a fatal error class FatalError(Exception): pass if __name__ == \"__main__\":", ": \"Duplicated file found\", FILE_UNKNOWN_ERROR : \"Unknown error encountered on", "and conditions of the BSD License # which accompanies this", "FILE_DECOMPRESS_FAILURE = 8 FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE", "create failure\", FILE_CHECKSUM_FAILURE : \"Invalid checksum of file\", FILE_COMPRESS_FAILURE :", "parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown error in parameters\", FORMAT_INVALID : \"Invalid", "WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR", "ATTRIBUTE_UNKNOWN_ERROR : \"Unknown error when accessing\", COMMAND_FAILURE : \"Failed to", "command\", IO_NOT_READY : \"Not ready\", IO_BUSY : \"Busy\", IO_TIMEOUT :", "error when accessing\", COMMAND_FAILURE : \"Failed to execute command\", IO_NOT_READY", "\"File or directory already exists\", 
FILE_TYPE_MISMATCH : \"Incorrect file type\",", "= 0x5FFF IO_NOT_READY = 0x6000 IO_BUSY = 0x6001 IO_TIMEOUT =", "0xF012 PCD_VARIABLE_ATTRIBUTES_CONFLICT_ERROR = 0xF013 ABORT_ERROR = 0xFFFE UNKNOWN_ERROR = 0xFFFF", "= 1 FILE_WRITE_FAILURE = 2 FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE =", "FILE_CASE_MISMATCH : \"File name case mismatch\", FILE_DUPLICATED : \"Duplicated file", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY", "= 0x8000 CODE_ERROR = 0xC0DE AUTOGEN_ERROR = 0xF000 PARSER_ERROR =", "ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY = 0x6000 IO_BUSY = 0x6001 IO_TIMEOUT", ": \"Unknown error in parameters\", FORMAT_INVALID : \"Invalid syntax/format\", FORMAT_NOT_SUPPORTED", "option\", OPTION_MISSING : \"Missing option\", OPTION_CONFLICT : \"Conflict options\", OPTION_VALUE_INVALID", "\"Failed to execute command\", IO_NOT_READY : \"Not ready\", IO_BUSY :", "PARAMETER_INVALID : \"Invalid parameter\", PARAMETER_MISSING : \"Missing parameter\", PARAMETER_UNKNOWN_ERROR :", "\"Not available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\", RESOURCE_FULL : \"Full\", RESOURCE_OVERFLOW", "IO_NOT_READY = 0x6000 IO_BUSY = 0x6001 IO_TIMEOUT = 0x6002 IO_UNKNOWN_ERROR", "RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown error\",", "10 FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST = 13", "rights reserved.<BR> # This program and the accompanying materials #", "\"Deprecated option\", OPTION_NOT_SUPPORTED : \"Unsupported option\", OPTION_UNKNOWN_ERROR : \"Unknown error", "\"Invalid parameter\", PARAMETER_MISSING : \"Missing parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown error", "PARSER_ERROR = 0xF001 BUILD_ERROR = 0xF002 GENFDS_ERROR = 0xF003 ECC_ERROR", "accompanying materials # are licensed and made available under the", "the terms and conditions of the BSD License # which", "0x6002 IO_UNKNOWN_ERROR = 0x6FFF COMMAND_FAILURE = 0x7000 PERMISSION_FAILURE = 0x8000", "to retrieve\", 
ATTRIBUTE_SET_FAILURE : \"Failed to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to", "to execute command\", IO_NOT_READY : \"Not ready\", IO_BUSY : \"Busy\",", "BSD License # which accompanies this distribution. The full text", "\"Incorrect file type\", FILE_CASE_MISMATCH : \"File name case mismatch\", FILE_DUPLICATED", "0xF004 EOT_ERROR = 0xF005 DDC_ERROR = 0xF009 WARNING_AS_ERROR = 0xF006", ": \"File compress failure\", FILE_DECOMPRESS_FAILURE : \"File decompress failure\", FILE_MOVE_FAILURE", "FILE_DUPLICATED = 17 FILE_UNKNOWN_ERROR = 0x0FFF OPTION_UNKNOWN = 0x1000 OPTION_MISSING", "IO_BUSY : \"Busy\", IO_TIMEOUT : \"Timeout\", IO_UNKNOWN_ERROR : \"Unknown error", "0x1000 OPTION_MISSING = 0x1001 OPTION_CONFLICT = 0x1002 OPTION_VALUE_INVALID = 0x1003", "\"Missing option\", OPTION_CONFLICT : \"Conflict options\", OPTION_VALUE_INVALID : \"Invalid value", "\"Full\", RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR : \"Unknown", "0x5004 ATTRIBUTE_UNKNOWN_ERROR = 0x5FFF IO_NOT_READY = 0x6000 IO_BUSY = 0x6001", "ON AN \"AS IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS", "= 0x5002 ATTRIBUTE_UPDATE_FAILURE = 0x5003 ATTRIBUTE_ACCESS_DENIED = 0x5004 ATTRIBUTE_UNKNOWN_ERROR =", ": \"File create failure\", FILE_CHECKSUM_FAILURE : \"Invalid checksum of file\",", "\"Unknown error\", } ## Exception indicating a fatal error class", "0x3FFF RESOURCE_NOT_AVAILABLE = 0x4000 RESOURCE_ALLOCATE_FAILURE = 0x4001 RESOURCE_FULL = 0x4002", "\"File name case mismatch\", FILE_DUPLICATED : \"Duplicated file found\", FILE_UNKNOWN_ERROR", "in IO operation\", UNKNOWN_ERROR : \"Unknown error\", } ## Exception", "FILE_COPY_FAILURE = 11 FILE_POSITIONING_FAILURE = 12 FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND", ": \"Unknown error when processing options\", PARAMETER_INVALID : \"Invalid parameter\",", "= 0xF000 PARSER_ERROR = 0xF001 BUILD_ERROR = 0xF002 GENFDS_ERROR =", "FILE_NOT_FOUND : \"File/directory not found in workspace\", FILE_OPEN_FAILURE : \"File", 
"= 8 FILE_MOVE_FAILURE = 9 FILE_DELETE_FAILURE = 10 FILE_COPY_FAILURE =", "fatal error class FatalError(Exception): pass if __name__ == \"__main__\": pass", "PARAMETER_UNKNOWN_ERROR =0x2FFF FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN =", ": \"Full\", RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN : \"Underrun\", RESOURCE_UNKNOWN_ERROR :", "LICENSE ON AN \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "OPTION_UNKNOWN_ERROR = 0x1FFF PARAMETER_INVALID = 0x2000 PARAMETER_MISSING = 0x2001 PARAMETER_UNKNOWN_ERROR", "available\", RESOURCE_ALLOCATE_FAILURE : \"Allocate failure\", RESOURCE_FULL : \"Full\", RESOURCE_OVERFLOW :", "# # Copyright (c) 2007 - 2015, Intel Corporation. All", "FILE_WRITE_FAILURE : \"File write failure\", FILE_PARSE_FAILURE : \"File parse failure\",", "\"Allocate failure\", RESOURCE_FULL : \"Full\", RESOURCE_OVERFLOW : \"Overflow\", RESOURCE_UNDERRUN :", "=0x2FFF FORMAT_INVALID = 0x3000 FORMAT_NOT_SUPPORTED = 0x3001 FORMAT_UNKNOWN = 0x3002", "to set\", ATTRIBUTE_UPDATE_FAILURE: \"Failed to update\", ATTRIBUTE_ACCESS_DENIED : \"Access denied\",", "\"File create failure\", FILE_CHECKSUM_FAILURE : \"Invalid checksum of file\", FILE_COMPRESS_FAILURE", "2 FILE_PARSE_FAILURE = 3 FILE_READ_FAILURE = 4 FILE_CREATE_FAILURE = 5", "12 FILE_ALREADY_EXIST = 13 FILE_NOT_FOUND = 14 FILE_TYPE_MISMATCH = 15", "= 0xF006 MIGRATION_ERROR = 0xF010 PCD_VALIDATION_INFO_ERROR = 0xF011 PCD_VARIABLE_ATTRIBUTES_ERROR =", "of the BSD License # which accompanies this distribution. The", "## Error message of each error code gErrorMessage = {", "FILE_DUPLICATED : \"Duplicated file found\", FILE_UNKNOWN_ERROR : \"Unknown error encountered", "0xC0DE AUTOGEN_ERROR = 0xF000 PARSER_ERROR = 0xF001 BUILD_ERROR = 0xF002", "= { FILE_NOT_FOUND : \"File/directory not found in workspace\", FILE_OPEN_FAILURE", "REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. 
# FILE_OPEN_FAILURE", "parameter\", PARAMETER_MISSING : \"Missing parameter\", PARAMETER_UNKNOWN_ERROR : \"Unknown error in" ]
[ "random class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset class\"\"\" def __init__(self, train=True):", "transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) ])", "'data/SUN397/train/*/*.jpg' if train else 'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path)) random.shuffle(self.data) cats", "class\"\"\" def __init__(self, train=True): super().__init__() path = 'data/SUN397/train/*/*.jpg' if train", "glob from PIL import Image import random class SUN397EncodableDataset(Dataset): \"\"\"SUN397", "224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) ]) self.device", "import torch import torchvision.transforms as transforms from torch.utils.data import Dataset", "if torch.cuda.is_available() else \"cpu\") def __getitem__(self, idx): if torch.is_tensor(idx): idx", "= 'data/SUN397/train/*/*.jpg' if train else 'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path)) random.shuffle(self.data)", "\"\"\"SUN397 encodable dataset class\"\"\" def __init__(self, train=True): super().__init__() path =", "transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) ]) self.device = torch.device(\"cuda:0\"", "len(self.encoded_data) == 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx], self.labels[idx] def", "cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in self.data]) self.preprocessor =", "0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx], self.labels[idx] def __len__(self): return", "self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx], self.labels[idx] def __len__(self): return len(self.labels) def", "as transforms from torch.utils.data import Dataset import glob from PIL", 
"self.encoded_data[idx], self.labels[idx] def __len__(self): return len(self.labels) def num_classes(self): return int(max(self.labels)", "self.labels[idx] return self.encoded_data[idx], self.labels[idx] def __len__(self): return len(self.labels) def num_classes(self):", "PIL import Image import random class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset", "path in self.data])) cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in", "import glob from PIL import Image import random class SUN397EncodableDataset(Dataset):", "= list(set([path.split(\"/\")[3] for path in self.data])) cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3])", "import Dataset import glob from PIL import Image import random", "if len(self.encoded_data) == 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx], self.labels[idx]", "if train else 'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path)) random.shuffle(self.data) cats =", "def __getitem__(self, idx): if torch.is_tensor(idx): idx = idx.tolist() if len(self.encoded_data)", "torch import torchvision.transforms as transforms from torch.utils.data import Dataset import", "idx.tolist() if len(self.encoded_data) == 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx],", "path in self.data]) self.preprocessor = transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485,", "torchvision.transforms as transforms from torch.utils.data import Dataset import glob from", "return self.encoded_data[idx], self.labels[idx] def __len__(self): return len(self.labels) def num_classes(self): return", "SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset class\"\"\" def __init__(self, train=True): super().__init__() path", "encodable dataset class\"\"\" def __init__(self, train=True): 
super().__init__() path = 'data/SUN397/train/*/*.jpg'", "0.225]) ]) self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def", "self.labels[idx] def __len__(self): return len(self.labels) def num_classes(self): return int(max(self.labels) +", "from torch.utils.data import Dataset import glob from PIL import Image", "import torchvision.transforms as transforms from torch.utils.data import Dataset import glob", "'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path)) random.shuffle(self.data) cats = list(set([path.split(\"/\")[3] for path", "self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in self.data]) self.preprocessor = transforms.Compose([", "]) self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def __getitem__(self,", "= list(glob.glob(path)) random.shuffle(self.data) cats = list(set([path.split(\"/\")[3] for path in self.data]))", "self.preprocessor = transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,", "import random class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset class\"\"\" def __init__(self,", "else 'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path)) random.shuffle(self.data) cats = list(set([path.split(\"/\")[3] for", "transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) ]) self.device =", "Image import random class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset class\"\"\" def", "== 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx], self.labels[idx] def __len__(self):", "__getitem__(self, idx): if torch.is_tensor(idx): idx = idx.tolist() if len(self.encoded_data) ==", "list(set([path.split(\"/\")[3] for path in self.data])) cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for", 
"random.shuffle(self.data) cats = list(set([path.split(\"/\")[3] for path in self.data])) cats.sort() self.labels", "std=[0.229, 0.224, 0.225]) ]) self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else", "train else 'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path)) random.shuffle(self.data) cats = list(set([path.split(\"/\")[3]", "def __len__(self): return len(self.labels) def num_classes(self): return int(max(self.labels) + 1)", "torch.cuda.is_available() else \"cpu\") def __getitem__(self, idx): if torch.is_tensor(idx): idx =", "return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return self.encoded_data[idx], self.labels[idx] def __len__(self): return len(self.labels)", "self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def __getitem__(self, idx):", "from PIL import Image import random class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable", "idx = idx.tolist() if len(self.encoded_data) == 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx]", "def __init__(self, train=True): super().__init__() path = 'data/SUN397/train/*/*.jpg' if train else", "= torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in self.data]) self.preprocessor = transforms.Compose([ transforms.Resize((224,", "= transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,", "= idx.tolist() if len(self.encoded_data) == 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')), self.labels[idx] return", "\"cpu\") def __getitem__(self, idx): if torch.is_tensor(idx): idx = idx.tolist() if", "transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])", "if torch.is_tensor(idx): idx = idx.tolist() if len(self.encoded_data) == 0: return", "= torch.device(\"cuda:0\" if 
torch.cuda.is_available() else \"cpu\") def __getitem__(self, idx): if", "list(glob.glob(path)) random.shuffle(self.data) cats = list(set([path.split(\"/\")[3] for path in self.data])) cats.sort()", "__init__(self, train=True): super().__init__() path = 'data/SUN397/train/*/*.jpg' if train else 'data/SUN397/test/*/*.jpg'", "torch.is_tensor(idx): idx = idx.tolist() if len(self.encoded_data) == 0: return self.preprocessor(Image.open(self.data[idx]).convert('RGB')),", "train=True): super().__init__() path = 'data/SUN397/train/*/*.jpg' if train else 'data/SUN397/test/*/*.jpg' self.data", "0.224, 0.225]) ]) self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")", "idx): if torch.is_tensor(idx): idx = idx.tolist() if len(self.encoded_data) == 0:", "import Image import random class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset class\"\"\"", "self.data])) cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in self.data]) self.preprocessor", "for path in self.data])) cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for path", "torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def __getitem__(self, idx): if torch.is_tensor(idx):", "super().__init__() path = 'data/SUN397/train/*/*.jpg' if train else 'data/SUN397/test/*/*.jpg' self.data =", "for path in self.data]) self.preprocessor = transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(),", "in self.data]) self.preprocessor = transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456,", "else \"cpu\") def __getitem__(self, idx): if torch.is_tensor(idx): idx = idx.tolist()", "self.data]) self.preprocessor = transforms.Compose([ transforms.Resize((224, 224)), transforms.ToTensor(), transforms.Normalize(mean=[0.485, 0.456, 0.406],", "0.406], std=[0.229, 0.224, 0.225]) ]) self.device = torch.device(\"cuda:0\" if 
torch.cuda.is_available()", "path = 'data/SUN397/train/*/*.jpg' if train else 'data/SUN397/test/*/*.jpg' self.data = list(glob.glob(path))", "self.data = list(glob.glob(path)) random.shuffle(self.data) cats = list(set([path.split(\"/\")[3] for path in", "0.456, 0.406], std=[0.229, 0.224, 0.225]) ]) self.device = torch.device(\"cuda:0\" if", "class SUN397EncodableDataset(Dataset): \"\"\"SUN397 encodable dataset class\"\"\" def __init__(self, train=True): super().__init__()", "torch.utils.data import Dataset import glob from PIL import Image import", "torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in self.data]) self.preprocessor = transforms.Compose([ transforms.Resize((224, 224)),", "dataset class\"\"\" def __init__(self, train=True): super().__init__() path = 'data/SUN397/train/*/*.jpg' if", "cats = list(set([path.split(\"/\")[3] for path in self.data])) cats.sort() self.labels =", "transforms from torch.utils.data import Dataset import glob from PIL import", "Dataset import glob from PIL import Image import random class", "in self.data])) cats.sort() self.labels = torch.LongTensor([cats.index(path.split(\"/\")[3]) for path in self.data])" ]
[ "All rights reserved. # See LICENSE.txt for complete terms. from", "= super(Location, self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE return d", "= common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE = None # overridden", "fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\") def to_dict(self): d = super(Location, self).to_dict()", "_binding_class = common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE = None #", "terms. from mixbox import entities, fields import cybox import cybox.bindings.cybox_common", "= None # overridden by subclasses id_ = fields.IdrefField(\"id\") idref", "rights reserved. # See LICENSE.txt for complete terms. from mixbox", "The MITRE Corporation. All rights reserved. # See LICENSE.txt for", "from mixbox import entities, fields import cybox import cybox.bindings.cybox_common as", "id_ = fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\") def", "cybox.lookup_extension(key, default=Location) class Location(entities.Entity): _binding = common_binding _binding_class = common_binding.LocationType", "fields import cybox import cybox.bindings.cybox_common as common_binding class LocationFactory(entities.EntityFactory): @classmethod", "to_dict(self): d = super(Location, self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE", "super(Location, self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE return d @staticmethod", "# See LICENSE.txt for complete terms. 
from mixbox import entities,", "common_binding class LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls, key): return cybox.lookup_extension(key, default=Location)", "LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls, key): return cybox.lookup_extension(key, default=Location) class Location(entities.Entity):", "= fields.TypedField(\"Name\") def to_dict(self): d = super(Location, self).to_dict() if self._XSI_TYPE:", "def to_dict(self): d = super(Location, self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"] =", "common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE = None # overridden by", "key): return cybox.lookup_extension(key, default=Location) class Location(entities.Entity): _binding = common_binding _binding_class", "mixbox import entities, fields import cybox import cybox.bindings.cybox_common as common_binding", "Corporation. All rights reserved. # See LICENSE.txt for complete terms.", "common_binding _binding_class = common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE = None", "import cybox import cybox.bindings.cybox_common as common_binding class LocationFactory(entities.EntityFactory): @classmethod def", "(c) 2017, The MITRE Corporation. All rights reserved. # See", "def entity_class(cls, key): return cybox.lookup_extension(key, default=Location) class Location(entities.Entity): _binding =", "'http://cybox.mitre.org/common-2' _XSI_TYPE = None # overridden by subclasses id_ =", "class LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls, key): return cybox.lookup_extension(key, default=Location) class", "See LICENSE.txt for complete terms. from mixbox import entities, fields", "by subclasses id_ = fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\") name =", "None # overridden by subclasses id_ = fields.IdrefField(\"id\") idref =", "complete terms. 
from mixbox import entities, fields import cybox import", "d = super(Location, self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE return", "entities, fields import cybox import cybox.bindings.cybox_common as common_binding class LocationFactory(entities.EntityFactory):", "subclasses id_ = fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\")", "= common_binding _binding_class = common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE =", "if self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE return d @staticmethod def lookup_class(xsi_type):", "_binding = common_binding _binding_class = common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE", "= fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\") def to_dict(self):", "fields.TypedField(\"Name\") def to_dict(self): d = super(Location, self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"]", "fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\") def to_dict(self): d", "d[\"xsi:type\"] = self._XSI_TYPE return d @staticmethod def lookup_class(xsi_type): return cybox.lookup_extension(xsi_type,", "name = fields.TypedField(\"Name\") def to_dict(self): d = super(Location, self).to_dict() if", "self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE return d @staticmethod def lookup_class(xsi_type): return", "_namespace = 'http://cybox.mitre.org/common-2' _XSI_TYPE = None # overridden by subclasses", "MITRE Corporation. All rights reserved. # See LICENSE.txt for complete", "@classmethod def entity_class(cls, key): return cybox.lookup_extension(key, default=Location) class Location(entities.Entity): _binding", "Copyright (c) 2017, The MITRE Corporation. All rights reserved. #", "# Copyright (c) 2017, The MITRE Corporation. 
All rights reserved.", "import cybox.bindings.cybox_common as common_binding class LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls, key):", "entity_class(cls, key): return cybox.lookup_extension(key, default=Location) class Location(entities.Entity): _binding = common_binding", "import entities, fields import cybox import cybox.bindings.cybox_common as common_binding class", "= 'http://cybox.mitre.org/common-2' _XSI_TYPE = None # overridden by subclasses id_", "2017, The MITRE Corporation. All rights reserved. # See LICENSE.txt", "idref = fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\") def to_dict(self): d =", "= self._XSI_TYPE return d @staticmethod def lookup_class(xsi_type): return cybox.lookup_extension(xsi_type, default=Location)", "# overridden by subclasses id_ = fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\")", "as common_binding class LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls, key): return cybox.lookup_extension(key,", "self).to_dict() if self._XSI_TYPE: d[\"xsi:type\"] = self._XSI_TYPE return d @staticmethod def", "default=Location) class Location(entities.Entity): _binding = common_binding _binding_class = common_binding.LocationType _namespace", "overridden by subclasses id_ = fields.IdrefField(\"id\") idref = fields.IdrefField(\"idref\") name", "<reponame>tirkarthi/python-cybox<filename>cybox/common/location.py # Copyright (c) 2017, The MITRE Corporation. All rights", "class Location(entities.Entity): _binding = common_binding _binding_class = common_binding.LocationType _namespace =", "_XSI_TYPE = None # overridden by subclasses id_ = fields.IdrefField(\"id\")", "= fields.IdrefField(\"idref\") name = fields.TypedField(\"Name\") def to_dict(self): d = super(Location,", "cybox import cybox.bindings.cybox_common as common_binding class LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls,", "LICENSE.txt for complete terms. 
from mixbox import entities, fields import", "Location(entities.Entity): _binding = common_binding _binding_class = common_binding.LocationType _namespace = 'http://cybox.mitre.org/common-2'", "cybox.bindings.cybox_common as common_binding class LocationFactory(entities.EntityFactory): @classmethod def entity_class(cls, key): return", "for complete terms. from mixbox import entities, fields import cybox", "reserved. # See LICENSE.txt for complete terms. from mixbox import", "return cybox.lookup_extension(key, default=Location) class Location(entities.Entity): _binding = common_binding _binding_class =" ]
[ "pandas as pd import pysam import matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot", "not None: softclips.append([clip_right, False, is_first_read]) if i == 10000000: break", "max(softclips) plt.plot([m, m], [0, 1.0], \":k\") plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\",", "functools import partial tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\") normal = pysam.AlignmentFile(snakemake.input[1],", "if i == 10000000: break softclips = pd.DataFrame(softclips, columns=[\"len\", \"left\",", "import partial tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\") normal = pysam.AlignmentFile(snakemake.input[1], \"rb\")", "plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g = sns.FacetGrid(softclips, col=\"left\", row=\"first_in_pair\")", "as plt import seaborn as sns from functools import partial", "rec in enumerate(normal): if rec.is_supplementary or rec.is_unmapped: continue is_first_read =", "numpy as np import pandas as pd import pysam import", "c: c[1] if c[0] == 4 else None clip_left =", "import matplotlib.pyplot as plt import seaborn as sns from functools", "if clip_right is not None: softclips.append([clip_right, False, is_first_read]) if i", "as sns from functools import partial tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\")", "pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips = [] for i, rec in enumerate(normal):", "1.0], \":k\") plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g = sns.FacetGrid(softclips,", "1.0], \"--k\") m = max(softclips) plt.plot([m, m], [0, 1.0], \":k\")", "np import pandas as pd import pysam import matplotlib matplotlib.use(\"agg\")", "rec.mpos get_clip = lambda c: c[1] if c[0] == 4", "or rec.is_unmapped: continue is_first_read = rec.pos < rec.mpos get_clip =", "= rec.pos < rec.mpos get_clip = lambda c: c[1] if", "lambda c: c[1] if c[0] == 4 else None clip_left", "if rec.is_supplementary or 
rec.is_unmapped: continue is_first_read = rec.pos < rec.mpos", "horizontalalignment=\"right\", verticalalignment=\"top\") g = sns.FacetGrid(softclips, col=\"left\", row=\"first_in_pair\") g = g.map(plot,", "pysam.AlignmentFile(snakemake.input[0], \"rb\") normal = pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips = [] for", "as np import pandas as pd import pysam import matplotlib", "plt import seaborn as sns from functools import partial tumor", "= pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips = [] for i, rec in", "#!/usr/bin/env python import sys import numpy as np import pandas", "def plot(*args, **kwargs): softclips = args[0] plt.hist(softclips, normed=True) q95 =", "is_first_read = rec.pos < rec.mpos get_clip = lambda c: c[1]", "pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"]) def plot(*args, **kwargs): softclips = args[0]", "is not None: softclips.append([clip_left, True, is_first_read]) clip_right = get_clip(rec.cigartuples[-1]) if", "g = sns.FacetGrid(softclips, col=\"left\", row=\"first_in_pair\") g = g.map(plot, \"len\") plt.savefig(snakemake.output[0])", "import pysam import matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot as plt import", "<gh_stars>1-10 #!/usr/bin/env python import sys import numpy as np import", "[] for i, rec in enumerate(normal): if rec.is_supplementary or rec.is_unmapped:", "c[1] if c[0] == 4 else None clip_left = get_clip(rec.cigartuples[0])", "is_first_read]) if i == 10000000: break softclips = pd.DataFrame(softclips, columns=[\"len\",", "normed=True) q95 = np.percentile(softclips, 99) plt.plot([q95, q95], [0, 1.0], \"--k\")", "sys import numpy as np import pandas as pd import", "get_clip(rec.cigartuples[-1]) if clip_right is not None: softclips.append([clip_right, False, is_first_read]) if", "99) plt.plot([q95, q95], [0, 1.0], \"--k\") m = max(softclips) plt.plot([m,", "i == 10000000: break softclips = pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"])", "= 
get_clip(rec.cigartuples[-1]) if clip_right is not None: softclips.append([clip_right, False, is_first_read])", "get_clip(rec.cigartuples[0]) if clip_left is not None: softclips.append([clip_left, True, is_first_read]) clip_right", "args[0] plt.hist(softclips, normed=True) q95 = np.percentile(softclips, 99) plt.plot([q95, q95], [0,", "None: softclips.append([clip_right, False, is_first_read]) if i == 10000000: break softclips", "clip_right is not None: softclips.append([clip_right, False, is_first_read]) if i ==", "is_first_read]) clip_right = get_clip(rec.cigartuples[-1]) if clip_right is not None: softclips.append([clip_right,", "not None: softclips.append([clip_left, True, is_first_read]) clip_right = get_clip(rec.cigartuples[-1]) if clip_right", "matplotlib.pyplot as plt import seaborn as sns from functools import", "import seaborn as sns from functools import partial tumor =", "= get_clip(rec.cigartuples[0]) if clip_left is not None: softclips.append([clip_left, True, is_first_read])", "rec.is_supplementary or rec.is_unmapped: continue is_first_read = rec.pos < rec.mpos get_clip", "if clip_left is not None: softclips.append([clip_left, True, is_first_read]) clip_right =", "pd import pysam import matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot as plt", "matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot as plt import seaborn as sns", "break softclips = pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"]) def plot(*args, **kwargs):", "m], [0, 1.0], \":k\") plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g", "pysam import matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot as plt import seaborn", "rec.pos < rec.mpos get_clip = lambda c: c[1] if c[0]", "rec.is_unmapped: continue is_first_read = rec.pos < rec.mpos get_clip = lambda", "\"--k\") m = max(softclips) plt.plot([m, m], [0, 1.0], \":k\") plt.text(m,", "is not None: softclips.append([clip_right, False, 
is_first_read]) if i == 10000000:", "None clip_left = get_clip(rec.cigartuples[0]) if clip_left is not None: softclips.append([clip_left,", "1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g = sns.FacetGrid(softclips, col=\"left\", row=\"first_in_pair\") g", "softclips = args[0] plt.hist(softclips, normed=True) q95 = np.percentile(softclips, 99) plt.plot([q95,", "[0, 1.0], \":k\") plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g =", "= lambda c: c[1] if c[0] == 4 else None", "tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\") normal = pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips =", "plot(*args, **kwargs): softclips = args[0] plt.hist(softclips, normed=True) q95 = np.percentile(softclips,", "\"rb\") softclips = [] for i, rec in enumerate(normal): if", "clip_right = get_clip(rec.cigartuples[-1]) if clip_right is not None: softclips.append([clip_right, False,", "== 4 else None clip_left = get_clip(rec.cigartuples[0]) if clip_left is", "10000000: break softclips = pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"]) def plot(*args,", "for i, rec in enumerate(normal): if rec.is_supplementary or rec.is_unmapped: continue", "4 else None clip_left = get_clip(rec.cigartuples[0]) if clip_left is not", "**kwargs): softclips = args[0] plt.hist(softclips, normed=True) q95 = np.percentile(softclips, 99)", "import matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot as plt import seaborn as", "i, rec in enumerate(normal): if rec.is_supplementary or rec.is_unmapped: continue is_first_read", "seaborn as sns from functools import partial tumor = pysam.AlignmentFile(snakemake.input[0],", "None: softclips.append([clip_left, True, is_first_read]) clip_right = get_clip(rec.cigartuples[-1]) if clip_right is", "verticalalignment=\"top\") g = sns.FacetGrid(softclips, col=\"left\", row=\"first_in_pair\") g = g.map(plot, \"len\")", "clip_left is not None: 
softclips.append([clip_left, True, is_first_read]) clip_right = get_clip(rec.cigartuples[-1])", "import numpy as np import pandas as pd import pysam", "softclips = pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"]) def plot(*args, **kwargs): softclips", "columns=[\"len\", \"left\", \"first_in_pair\"]) def plot(*args, **kwargs): softclips = args[0] plt.hist(softclips,", "sns from functools import partial tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\") normal", "q95 = np.percentile(softclips, 99) plt.plot([q95, q95], [0, 1.0], \"--k\") m", "clip_left = get_clip(rec.cigartuples[0]) if clip_left is not None: softclips.append([clip_left, True,", "continue is_first_read = rec.pos < rec.mpos get_clip = lambda c:", "== 10000000: break softclips = pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"]) def", "plt.plot([m, m], [0, 1.0], \":k\") plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\")", "= pysam.AlignmentFile(snakemake.input[0], \"rb\") normal = pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips = []", "c[0] == 4 else None clip_left = get_clip(rec.cigartuples[0]) if clip_left", "= [] for i, rec in enumerate(normal): if rec.is_supplementary or", "else None clip_left = get_clip(rec.cigartuples[0]) if clip_left is not None:", "\"first_in_pair\"]) def plot(*args, **kwargs): softclips = args[0] plt.hist(softclips, normed=True) q95", "= max(softclips) plt.plot([m, m], [0, 1.0], \":k\") plt.text(m, 1, \"max={}\".format(m),", "as pd import pysam import matplotlib matplotlib.use(\"agg\") import matplotlib.pyplot as", "enumerate(normal): if rec.is_supplementary or rec.is_unmapped: continue is_first_read = rec.pos <", "plt.hist(softclips, normed=True) q95 = np.percentile(softclips, 99) plt.plot([q95, q95], [0, 1.0],", "plt.plot([q95, q95], [0, 1.0], \"--k\") m = max(softclips) plt.plot([m, m],", "matplotlib.use(\"agg\") import matplotlib.pyplot as plt import seaborn as sns from", 
"\"left\", \"first_in_pair\"]) def plot(*args, **kwargs): softclips = args[0] plt.hist(softclips, normed=True)", "\"rb\") normal = pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips = [] for i,", "np.percentile(softclips, 99) plt.plot([q95, q95], [0, 1.0], \"--k\") m = max(softclips)", "import pandas as pd import pysam import matplotlib matplotlib.use(\"agg\") import", "partial tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\") normal = pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips", "q95], [0, 1.0], \"--k\") m = max(softclips) plt.plot([m, m], [0,", "if c[0] == 4 else None clip_left = get_clip(rec.cigartuples[0]) if", "< rec.mpos get_clip = lambda c: c[1] if c[0] ==", "softclips.append([clip_right, False, is_first_read]) if i == 10000000: break softclips =", "softclips = [] for i, rec in enumerate(normal): if rec.is_supplementary", "False, is_first_read]) if i == 10000000: break softclips = pd.DataFrame(softclips,", "= np.percentile(softclips, 99) plt.plot([q95, q95], [0, 1.0], \"--k\") m =", "[0, 1.0], \"--k\") m = max(softclips) plt.plot([m, m], [0, 1.0],", "normal = pysam.AlignmentFile(snakemake.input[1], \"rb\") softclips = [] for i, rec", "from functools import partial tumor = pysam.AlignmentFile(snakemake.input[0], \"rb\") normal =", "m = max(softclips) plt.plot([m, m], [0, 1.0], \":k\") plt.text(m, 1,", "python import sys import numpy as np import pandas as", "softclips.append([clip_left, True, is_first_read]) clip_right = get_clip(rec.cigartuples[-1]) if clip_right is not", "= args[0] plt.hist(softclips, normed=True) q95 = np.percentile(softclips, 99) plt.plot([q95, q95],", "in enumerate(normal): if rec.is_supplementary or rec.is_unmapped: continue is_first_read = rec.pos", "= pd.DataFrame(softclips, columns=[\"len\", \"left\", \"first_in_pair\"]) def plot(*args, **kwargs): softclips =", "\":k\") plt.text(m, 1, \"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g = sns.FacetGrid(softclips, 
col=\"left\",", "\"max={}\".format(m), horizontalalignment=\"right\", verticalalignment=\"top\") g = sns.FacetGrid(softclips, col=\"left\", row=\"first_in_pair\") g =", "get_clip = lambda c: c[1] if c[0] == 4 else", "True, is_first_read]) clip_right = get_clip(rec.cigartuples[-1]) if clip_right is not None:", "import sys import numpy as np import pandas as pd" ]
[ "import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'), ]", "), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ),", "] operations = [ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'},", "migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('rss_feeder_api',", "Django 3.1 on 2020-08-13 16:23 from django.db import migrations, models", "('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed',", "class Migration(migrations.Migration): dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'), ] operations =", "django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies =", "Generated by Django 3.1 on 2020-08-13 16:23 from django.db import", "name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry',", "Migration(migrations.Migration): dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'), ] operations = [", "'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ),", "from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies", "'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),", "<reponame>RSaab/rss-scraper<gh_stars>0 # Generated by Django 3.1 on 2020-08-13 16:23 from", "# 
Generated by Django 3.1 on 2020-08-13 16:23 from django.db", "'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False,", "migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at',", "name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',),", "[ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed',", "'entries'}, ), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'},", "name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ),", "2020-08-13 16:23 from django.db import migrations, models import django.utils.timezone class", "options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at',", "), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry',", "= [ ('rss_feeder_api', '0002_feed_subtitle'), ] operations = [ migrations.AlterModelOptions( name='entry',", "migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed', 
options={'ordering':", "preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether( name='entry', unique_together={('guid',)},", "= [ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions(", "('rss_feeder_api', '0002_feed_subtitle'), ] operations = [ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',),", "by Django 3.1 on 2020-08-13 16:23 from django.db import migrations,", "), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether( name='entry', unique_together={('guid',)}, ),", "operations = [ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ),", "3.1 on 2020-08-13 16:23 from django.db import migrations, models import", "on 2020-08-13 16:23 from django.db import migrations, models import django.utils.timezone", "'0002_feed_subtitle'), ] operations = [ migrations.AlterModelOptions( name='entry', options={'ordering': ('-updated_at',), 'verbose_name_plural':", "import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [", "[ ('rss_feeder_api', '0002_feed_subtitle'), ] operations = [ migrations.AlterModelOptions( name='entry', options={'ordering':", "'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural':", "django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'), ] operations", "'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), 
migrations.AddField(", "models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'),", "migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether( name='entry', unique_together={('guid',)}, ), ]", "default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether( name='entry',", "model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True),", "options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'}, ), migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name':", "dependencies = [ ('rss_feeder_api', '0002_feed_subtitle'), ] operations = [ migrations.AlterModelOptions(", "('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField( model_name='entry', name='created_at', field=models.DateTimeField(auto_now_add=True,", "migrations.AlterModelOptions( name='feed', options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'}, ), migrations.AddField(", "16:23 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration):", "field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), preserve_default=False, ), migrations.AddField( model_name='entry', name='updated_at', field=models.DateTimeField(auto_now=True), ), migrations.AlterUniqueTogether(" ]
[ "v2 has BN-ReLU before Pooling x = BatchNormalization()(x) x =", "res_block == 0: # first layer and first stage activation", "y = resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1,", "dimensions activation (string): activation name batch_normalization (bool): whether to include", "layer but not first stage strides = 2 # downsample", "for stage in range(3): for res_block in range(num_res_blocks): activation =", "projection residual shortcut connection to match # changed dims x", "= tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras import", "model. model = Model(inputs=inputs, outputs=outputs) return model, inputs, outputs, logits,", "include batch normalization conv_first (bool): conv-bn-activation (True) or bn-activation-conv (False)", "of filters kernel_size (int): Conv2D square kernel dimensions strides (int):", "= keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D", "kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation stack builder", "top. 
# v2 has BN-ReLU before Pooling x = BatchNormalization()(x)", "before Pooling x = BatchNormalization()(x) x = Activation('relu')(x) x =", "= keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras import backend def resnet_layer(inputs,", "conv_first: x = conv(x) if batch_normalization: x = BatchNormalization()(x) if", "next layer \"\"\" conv = Conv2D( num_filters, kernel_size=kernel_size, strides=strides, padding='same',", "range(num_res_blocks): activation = 'relu' batch_normalization = True strides = 1", "x = conv(x) if batch_normalization: x = BatchNormalization()(x) if activation", "= resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) # Instantiate the stack of residual", "activation name batch_normalization (bool): whether to include batch normalization conv_first", "x = Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x) logits", "(tensor): input tensor from input image or previous layer num_filters", "strides=strides, activation=None, batch_normalization=False) x = tensorflow.keras.layers.add([x, y]) num_filters_in = num_filters_out", "# downsample # bottleneck residual unit y = resnet_layer(inputs=x, num_filters=num_filters_in,", "kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x = inputs if conv_first: x = conv(x)", "has BN-ReLU before Pooling x = BatchNormalization()(x) x = Activation('relu')(x)", "AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x) logits = Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs =", "resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'): depth = complexityParameter * 9 +", "stage strides = 2 # downsample # bottleneck residual unit", "activation (string): activation name batch_normalization (bool): whether to include batch", "= int((depth - 2) / 9) inputs = input x", "x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) # Instantiate 
the stack of", "Instantiate the stack of residual units for stage in range(3):", "strides = 2 # downsample # bottleneck residual unit y", "kernel_regularizer=l2(1e-4)) x = inputs if conv_first: x = conv(x) if", "units for stage in range(3): for res_block in range(num_res_blocks): activation", "BatchNormalization()(x) if activation is not None: x = Activation(activation)(x) x", "first layer and first stage activation = None batch_normalization =", "to the next layer \"\"\" conv = Conv2D( num_filters, kernel_size=kernel_size,", "x = inputs if conv_first: x = conv(x) if batch_normalization:", "bottleneck residual unit y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation,", "== 0: num_filters_out = num_filters_in * 4 if res_block ==", "definition. num_filters_in = 16 num_res_blocks = int((depth - 2) /", "= None batch_normalization = False else: num_filters_out = num_filters_in *", "return x def resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'): depth = complexityParameter", "# v2 has BN-ReLU before Pooling x = BatchNormalization()(x) x", "num_filters=num_filters_in, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False) if res_block", "kernel_size=1, strides=strides, activation=None, batch_normalization=False) x = tensorflow.keras.layers.add([x, y]) num_filters_in =", "if activation is not None: x = Activation(activation)(x) else: if", "from tensorflow import keras Model = keras.models.Model Dense = keras.layers.Dense", "not None: x = Activation(activation)(x) x = conv(x) return x", "match # changed dims x = resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides,", "110 in [b])') # Start model definition. num_filters_in = 16", "2 # downsample # bottleneck residual unit y = resnet_layer(inputs=x,", "y]) num_filters_in = num_filters_out # Add classifier on top. #", "Instantiate model. 
model = Model(inputs=inputs, outputs=outputs) return model, inputs, outputs,", "9 + 2 if (depth - 2) % 9 !=", "(int): Conv2D square stride dimensions activation (string): activation name batch_normalization", "batch normalization conv_first (bool): conv-bn-activation (True) or bn-activation-conv (False) #", "= 16 num_res_blocks = int((depth - 2) / 9) inputs", "= num_filters_in * 2 if res_block == 0: # first", "num_filters_in = num_filters_out # Add classifier on top. # v2", "Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras import backend def resnet_layer(inputs, num_filters=16, kernel_size=3,", "True strides = 1 if stage == 0: num_filters_out =", "= Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x) logits =", "x = Activation(activation)(x) else: if batch_normalization: x = BatchNormalization()(x) if", "num_filters=num_filters_out, kernel_size=1, conv_first=False) if res_block == 0: # linear projection", "else: if batch_normalization: x = BatchNormalization()(x) if activation is not", "= resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False)", "activation=None, batch_normalization=False) x = tensorflow.keras.layers.add([x, y]) num_filters_in = num_filters_out #", "Conv2D square stride dimensions activation (string): activation name batch_normalization (bool):", "x (tensor): tensor as input to the next layer \"\"\"", "if stage == 0: num_filters_out = num_filters_in * 4 if", "the next layer \"\"\" conv = Conv2D( num_filters, kernel_size=kernel_size, strides=strides,", "# Instantiate model. 
model = Model(inputs=inputs, outputs=outputs) return model, inputs,", "should be 9n+2 (eg 56 or 110 in [b])') #", "tensorflow.keras import backend def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True,", "or previous layer num_filters (int): Conv2D number of filters kernel_size", "of residual units for stage in range(3): for res_block in", "residual shortcut connection to match # changed dims x =", "+ 2 if (depth - 2) % 9 != 0:", "range(3): for res_block in range(num_res_blocks): activation = 'relu' batch_normalization =", "Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras", "y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False) y", "if activation is not None: x = Activation(activation)(x) x =", "from input image or previous layer num_filters (int): Conv2D number", "batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation stack builder # Arguments inputs", "16 num_res_blocks = int((depth - 2) / 9) inputs =", "resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False) y = resnet_layer(inputs=y,", "BatchNormalization()(x) if activation is not None: x = Activation(activation)(x) else:", "(eg 56 or 110 in [b])') # Start model definition.", "Conv2D( num_filters, kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x = inputs", "conv(x) if batch_normalization: x = BatchNormalization()(x) if activation is not", "stack of residual units for stage in range(3): for res_block", "= resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False) if res_block == 0: #", "x = 
BatchNormalization()(x) x = Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x) final_features", "= conv(x) return x def resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'): depth", "= keras.layers.Dense Activation = keras.layers.Activation Flatten = keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization", "strides=1, activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation stack builder #", "= False else: num_filters_out = num_filters_in * 2 if res_block", "square kernel dimensions strides (int): Conv2D square stride dimensions activation", "AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras import backend def", "first stage strides = 2 # downsample # bottleneck residual", "the stack of residual units for stage in range(3): for", "= Activation('softmax')(logits) # Instantiate model. model = Model(inputs=inputs, outputs=outputs) return", "num_filters_out = num_filters_in * 4 if res_block == 0: #", "raise ValueError('depth should be 9n+2 (eg 56 or 110 in", "conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False) if res_block ==", "depth = complexityParameter * 9 + 2 if (depth -", "num_filters, kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x = inputs if", "\"\"\" conv = Conv2D( num_filters, kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4))", "def resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'): depth = complexityParameter * 9", "filters kernel_size (int): Conv2D square kernel dimensions strides (int): Conv2D", "stride dimensions activation (string): activation name batch_normalization (bool): whether to", "shortcut connection to match # changed dims x = resnet_layer(inputs=x,", 
"backend def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D", "Returns x (tensor): tensor as input to the next layer", "!= 0: raise ValueError('depth should be 9n+2 (eg 56 or", "input to the next layer \"\"\" conv = Conv2D( num_filters,", "is not None: x = Activation(activation)(x) x = conv(x) return", "activation is not None: x = Activation(activation)(x) else: if batch_normalization:", "kernel dimensions strides (int): Conv2D square stride dimensions activation (string):", "= True strides = 1 if stage == 0: num_filters_out", "num_filters_in * 2 if res_block == 0: # first layer", "= input x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) # Instantiate the", "0: # first layer and first stage activation = None", "9n+2 (eg 56 or 110 in [b])') # Start model", "in [b])') # Start model definition. num_filters_in = 16 num_res_blocks", "num_filters=num_filters_in, conv_first=True) # Instantiate the stack of residual units for", "* 2 if res_block == 0: # first layer but", "l2=keras.regularizers.l2 from tensorflow.keras import backend def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1,", "[b])') # Start model definition. 
num_filters_in = 16 num_res_blocks =", "keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from", "9) inputs = input x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) #", "square stride dimensions activation (string): activation name batch_normalization (bool): whether", "normalization conv_first (bool): conv-bn-activation (True) or bn-activation-conv (False) # Returns", "# Instantiate the stack of residual units for stage in", "if res_block == 0: # linear projection residual shortcut connection", "first layer but not first stage strides = 2 #", "but not first stage strides = 2 # downsample #", "classifier on top. # v2 has BN-ReLU before Pooling x", "kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits) # Instantiate model. model = Model(inputs=inputs,", "stage == 0: num_filters_out = num_filters_in * 4 if res_block", "num_filters_in = 16 num_res_blocks = int((depth - 2) / 9)", "for res_block in range(num_res_blocks): activation = 'relu' batch_normalization = True", "9 != 0: raise ValueError('depth should be 9n+2 (eg 56", "56 or 110 in [b])') # Start model definition. num_filters_in", "= AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x) logits = Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs", "conv-bn-activation (True) or bn-activation-conv (False) # Returns x (tensor): tensor", "= Conv2D( num_filters, kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x =", "int((depth - 2) / 9) inputs = input x =", "inputs if conv_first: x = conv(x) if batch_normalization: x =", "batch_normalization=False) x = tensorflow.keras.layers.add([x, y]) num_filters_in = num_filters_out # Add", "num_filters_out # Add classifier on top. 
# v2 has BN-ReLU", "- 2) % 9 != 0: raise ValueError('depth should be", "# first layer but not first stage strides = 2", "= resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None, batch_normalization=False) x = tensorflow.keras.layers.add([x,", "= num_filters_in * 4 if res_block == 0: # first", "= inputs if conv_first: x = conv(x) if batch_normalization: x", "Flatten = keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D =", "num_res_blocks = int((depth - 2) / 9) inputs = input", "conv(x) return x def resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'): depth =", "not None: x = Activation(activation)(x) else: if batch_normalization: x =", "= complexityParameter * 9 + 2 if (depth - 2)", "input tensor from input image or previous layer num_filters (int):", "(string): activation name batch_normalization (bool): whether to include batch normalization", "2 if res_block == 0: # first layer but not", "complexityParameter, num_classes=10, dataset='cifar10'): depth = complexityParameter * 9 + 2", "Conv2D square kernel dimensions strides (int): Conv2D square stride dimensions", "keras.models.Model Dense = keras.layers.Dense Activation = keras.layers.Activation Flatten = keras.layers.Flatten", "is not None: x = Activation(activation)(x) else: if batch_normalization: x", "= 1 if stage == 0: num_filters_out = num_filters_in *", "Dense = keras.layers.Dense Activation = keras.layers.Activation Flatten = keras.layers.Flatten BatchNormalization=", "in range(3): for res_block in range(num_res_blocks): activation = 'relu' batch_normalization", "Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits) # Instantiate model. 
model =", "activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation stack builder # Arguments", "Pooling x = BatchNormalization()(x) x = Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x)", "x = BatchNormalization()(x) if activation is not None: x =", "keras Model = keras.models.Model Dense = keras.layers.Dense Activation = keras.layers.Activation", "num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation stack", "builder # Arguments inputs (tensor): input tensor from input image", "None batch_normalization = False else: num_filters_out = num_filters_in * 2", "num_classes=10, dataset='cifar10'): depth = complexityParameter * 9 + 2 if", "import keras Model = keras.models.Model Dense = keras.layers.Dense Activation =", "(False) # Returns x (tensor): tensor as input to the", "False else: num_filters_out = num_filters_in * 2 if res_block ==", "Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x) logits = Dense(num_classes,", "= num_filters_out # Add classifier on top. 
# v2 has", "x = resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None, batch_normalization=False) x =", "strides = 1 if stage == 0: num_filters_out = num_filters_in", "= 'relu' batch_normalization = True strides = 1 if stage", "= Flatten()(x) logits = Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits) #", "num_filters (int): Conv2D number of filters kernel_size (int): Conv2D square", "x = AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x) logits = Dense(num_classes, kernel_initializer='he_normal')(final_features)", "(int): Conv2D number of filters kernel_size (int): Conv2D square kernel", "Activation(activation)(x) else: if batch_normalization: x = BatchNormalization()(x) if activation is", "/ 9) inputs = input x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True)", "\"\"\"2D Convolution-Batch Normalization-Activation stack builder # Arguments inputs (tensor): input", "or bn-activation-conv (False) # Returns x (tensor): tensor as input", "import tensorflow from tensorflow import keras Model = keras.models.Model Dense", "batch_normalization = False else: num_filters_out = num_filters_in * 2 if", "outputs = Activation('softmax')(logits) # Instantiate model. model = Model(inputs=inputs, outputs=outputs)", "residual units for stage in range(3): for res_block in range(num_res_blocks):", "model = Model(inputs=inputs, outputs=outputs) return model, inputs, outputs, logits, final_features", "strides (int): Conv2D square stride dimensions activation (string): activation name", "tensorflow import keras Model = keras.models.Model Dense = keras.layers.Dense Activation", "Add classifier on top. # v2 has BN-ReLU before Pooling", "conv_first (bool): conv-bn-activation (True) or bn-activation-conv (False) # Returns x", "tensorflow.keras.layers.add([x, y]) num_filters_in = num_filters_out # Add classifier on top.", "# Add classifier on top. 
# v2 has BN-ReLU before", "Conv2D number of filters kernel_size (int): Conv2D square kernel dimensions", "resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation", "kernel_size (int): Conv2D square kernel dimensions strides (int): Conv2D square", "= Activation(activation)(x) x = conv(x) return x def resnet_v2(input, complexityParameter,", "# Returns x (tensor): tensor as input to the next", "import backend def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=True):", "BatchNormalization()(x) x = Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x) final_features = Flatten()(x)", "* 9 + 2 if (depth - 2) % 9", "resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False) if", "final_features = Flatten()(x) logits = Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits)", "= 2 # downsample # bottleneck residual unit y =", "input x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) # Instantiate the stack", "dims x = resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None, batch_normalization=False) x", "activation = None batch_normalization = False else: num_filters_out = num_filters_in", "name batch_normalization (bool): whether to include batch normalization conv_first (bool):", "# Arguments inputs (tensor): input tensor from input image or", "connection to match # changed dims x = resnet_layer(inputs=x, num_filters=num_filters_out,", "keras.layers.Dense Activation = keras.layers.Activation Flatten = keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D", "layer num_filters (int): Conv2D number of filters kernel_size (int): Conv2D", 
"* 4 if res_block == 0: # first layer and", "0: num_filters_out = num_filters_in * 4 if res_block == 0:", "if batch_normalization: x = BatchNormalization()(x) if activation is not None:", "tensorflow from tensorflow import keras Model = keras.models.Model Dense =", "(depth - 2) % 9 != 0: raise ValueError('depth should", "== 0: # first layer and first stage activation =", "logits = Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits) # Instantiate model.", "bn-activation-conv (False) # Returns x (tensor): tensor as input to", "0: # first layer but not first stage strides =", "num_filters_in * 4 if res_block == 0: # first layer", "(int): Conv2D square kernel dimensions strides (int): Conv2D square stride", "strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x = inputs if conv_first: x", "conv = Conv2D( num_filters, kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x", "== 0: # first layer but not first stage strides", "keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input", "input image or previous layer num_filters (int): Conv2D number of", "Convolution-Batch Normalization-Activation stack builder # Arguments inputs (tensor): input tensor", "activation = 'relu' batch_normalization = True strides = 1 if", "x def resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'): depth = complexityParameter *", "else: num_filters_out = num_filters_in * 2 if res_block == 0:", "Model = keras.models.Model Dense = keras.layers.Dense Activation = keras.layers.Activation Flatten", "x = tensorflow.keras.layers.add([x, y]) num_filters_in = num_filters_out # Add classifier", "'relu' batch_normalization = True strides = 1 if stage ==", "as input to the next layer \"\"\" conv = 
Conv2D(", "batch_normalization = True strides = 1 if stage == 0:", "y = resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False) if res_block == 0:", "resnet_layer(inputs=y, num_filters=num_filters_out, kernel_size=1, conv_first=False) if res_block == 0: # linear", "= conv(x) if batch_normalization: x = BatchNormalization()(x) if activation is", "and first stage activation = None batch_normalization = False else:", "4 if res_block == 0: # first layer and first", "image or previous layer num_filters (int): Conv2D number of filters", "if res_block == 0: # first layer and first stage", "linear projection residual shortcut connection to match # changed dims", "number of filters kernel_size (int): Conv2D square kernel dimensions strides", "batch_normalization (bool): whether to include batch normalization conv_first (bool): conv-bn-activation", "tensor from input image or previous layer num_filters (int): Conv2D", "== 0: # linear projection residual shortcut connection to match", "complexityParameter * 9 + 2 if (depth - 2) %", "= BatchNormalization()(x) x = Activation('relu')(x) x = AveragePooling2D(pool_size=8)(x) final_features =", "kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False)", "keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras import backend def resnet_layer(inputs, num_filters=16,", "Normalization-Activation stack builder # Arguments inputs (tensor): input tensor from", "= BatchNormalization()(x) if activation is not None: x = Activation(activation)(x)", "num_filters_out = num_filters_in * 2 if res_block == 0: #", "or 110 in [b])') # Start model definition. 
num_filters_in =", "unit y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False)", "Activation(activation)(x) x = conv(x) return x def resnet_v2(input, complexityParameter, num_classes=10,", "tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2 from tensorflow.keras import backend", "dimensions strides (int): Conv2D square stride dimensions activation (string): activation", "kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x = inputs if conv_first:", "tensor as input to the next layer \"\"\" conv =", "= resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False) y =", "(bool): conv-bn-activation (True) or bn-activation-conv (False) # Returns x (tensor):", "inputs = input x = resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) # Instantiate", "0: raise ValueError('depth should be 9n+2 (eg 56 or 110", "= Activation(activation)(x) else: if batch_normalization: x = BatchNormalization()(x) if activation", "def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu', batch_normalization=True, conv_first=True): \"\"\"2D Convolution-Batch", "# linear projection residual shortcut connection to match # changed", "layer \"\"\" conv = Conv2D( num_filters, kernel_size=kernel_size, strides=strides, padding='same', kernel_initializer='he_normal',", "(tensor): tensor as input to the next layer \"\"\" conv", "stage in range(3): for res_block in range(num_res_blocks): activation = 'relu'", "if (depth - 2) % 9 != 0: raise ValueError('depth", "BN-ReLU before Pooling x = BatchNormalization()(x) x = Activation('relu')(x) x", "Start model definition. 
num_filters_in = 16 num_res_blocks = int((depth -", "= tensorflow.keras.layers.add([x, y]) num_filters_in = num_filters_out # Add classifier on", "resnet_layer(inputs=inputs, num_filters=num_filters_in, conv_first=True) # Instantiate the stack of residual units", "changed dims x = resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None, batch_normalization=False)", "stack builder # Arguments inputs (tensor): input tensor from input", "# bottleneck residual unit y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides,", "first stage activation = None batch_normalization = False else: num_filters_out", "# Start model definition. num_filters_in = 16 num_res_blocks = int((depth", "BatchNormalization= keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D = keras.layers.AveragePooling2D Input=keras.layers.Input l2=keras.regularizers.l2", "num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_in,", "conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_out,", "batch_normalization: x = BatchNormalization()(x) if activation is not None: x", "0: # linear projection residual shortcut connection to match #", "num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None, batch_normalization=False) x = tensorflow.keras.layers.add([x, y]) num_filters_in", "x = conv(x) return x def resnet_v2(input, complexityParameter, num_classes=10, dataset='cifar10'):", "ValueError('depth should be 9n+2 (eg 56 or 110 in [b])')", "res_block == 0: # linear projection residual shortcut connection to", "= keras.models.Model Dense = keras.layers.Dense Activation = keras.layers.Activation Flatten =", "dataset='cifar10'): depth = complexityParameter * 9 + 
2 if (depth", "activation is not None: x = Activation(activation)(x) x = conv(x)", "# changed dims x = resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None,", "(True) or bn-activation-conv (False) # Returns x (tensor): tensor as", "stage activation = None batch_normalization = False else: num_filters_out =", "previous layer num_filters (int): Conv2D number of filters kernel_size (int):", "to include batch normalization conv_first (bool): conv-bn-activation (True) or bn-activation-conv", "activation=activation, batch_normalization=batch_normalization, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y =", "2 if (depth - 2) % 9 != 0: raise", "Activation = keras.layers.Activation Flatten = keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D =", "if conv_first: x = conv(x) if batch_normalization: x = BatchNormalization()(x)", "res_block in range(num_res_blocks): activation = 'relu' batch_normalization = True strides", "if res_block == 0: # first layer but not first", "conv_first=True) # Instantiate the stack of residual units for stage", "1 if stage == 0: num_filters_out = num_filters_in * 4", "(bool): whether to include batch normalization conv_first (bool): conv-bn-activation (True)", "- 2) / 9) inputs = input x = resnet_layer(inputs=inputs,", "downsample # bottleneck residual unit y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1,", "2) % 9 != 0: raise ValueError('depth should be 9n+2", "on top. 
# v2 has BN-ReLU before Pooling x =", "# first layer and first stage activation = None batch_normalization", "= keras.layers.Activation Flatten = keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D", "Arguments inputs (tensor): input tensor from input image or previous", "2) / 9) inputs = input x = resnet_layer(inputs=inputs, num_filters=num_filters_in,", "strides=strides, activation=activation, batch_normalization=batch_normalization, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y", "layer and first stage activation = None batch_normalization = False", "None: x = Activation(activation)(x) x = conv(x) return x def", "model definition. num_filters_in = 16 num_res_blocks = int((depth - 2)", "batch_normalization=batch_normalization, conv_first=False) y = resnet_layer(inputs=y, num_filters=num_filters_in, conv_first=False) y = resnet_layer(inputs=y,", "keras.layers.Activation Flatten = keras.layers.Flatten BatchNormalization= keras.layers.BatchNormalization Conv2D = tensorflow.keras.layers.Conv2D AveragePooling2D", "from tensorflow.keras import backend def resnet_layer(inputs, num_filters=16, kernel_size=3, strides=1, activation='relu',", "padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-4)) x = inputs if conv_first: x =", "conv_first=True): \"\"\"2D Convolution-Batch Normalization-Activation stack builder # Arguments inputs (tensor):", "Flatten()(x) logits = Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits) # Instantiate", "inputs (tensor): input tensor from input image or previous layer", "x = Activation(activation)(x) x = conv(x) return x def resnet_v2(input,", "not first stage strides = 2 # downsample # bottleneck", "None: x = Activation(activation)(x) else: if batch_normalization: x = BatchNormalization()(x)", "whether to include batch normalization conv_first (bool): 
conv-bn-activation (True) or", "residual unit y = resnet_layer(inputs=x, num_filters=num_filters_in, kernel_size=1, strides=strides, activation=activation, batch_normalization=batch_normalization,", "kernel_size=1, conv_first=False) if res_block == 0: # linear projection residual", "be 9n+2 (eg 56 or 110 in [b])') # Start", "res_block == 0: # first layer but not first stage", "to match # changed dims x = resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1,", "Activation('softmax')(logits) # Instantiate model. model = Model(inputs=inputs, outputs=outputs) return model,", "in range(num_res_blocks): activation = 'relu' batch_normalization = True strides =", "conv_first=False) if res_block == 0: # linear projection residual shortcut", "% 9 != 0: raise ValueError('depth should be 9n+2 (eg", "resnet_layer(inputs=x, num_filters=num_filters_out, kernel_size=1, strides=strides, activation=None, batch_normalization=False) x = tensorflow.keras.layers.add([x, y])", "= Dense(num_classes, kernel_initializer='he_normal')(final_features) outputs = Activation('softmax')(logits) # Instantiate model. model" ]
[ "'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf',", "{} graph['nodes'] = [] graph['edges'] = [] nodes = {}", "graph['nodes'] = [] graph['edges'] = [] nodes = {} q", "?obj . VALUES ?testPred { fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf", "fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf", "fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf }", "{ 'label': '', 'degree': 0, 'size': 10, 'alternateNames': [], 'honorificPrefixes':", "'id': dump(subj), }}) if pred == FHKB.Sex: node['data'][dump(pred)] = dump(obj)", "SCHEMA_ORG = rdflib.Namespace(schema_str) def dump(uriref): if uriref.__contains__('#'): return uriref.split('#')[-1] return", "'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW + RELS_TO_INFER", "?pred ?obj . 
} ORDER BY ?person\"\"\") for rel in", "{ fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf", "= [] nodes = {} q = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#>", "node['data']['images'].append(obj) else: node['data'][dump(pred)] = obj elif pred == rdflib.RDFS.label: node['data']['label']", "fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt", "= obj elif pred == rdflib.RDFS.label: node['data']['label'] = obj else:", "'label': '', 'degree': 0, 'size': 10, 'alternateNames': [], 'honorificPrefixes': [],", "'honorificPrefixes': [], 'honorificSuffixes': [], 'images': [], 'id': dump(subj), }}) if", "fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj WHERE { ?person a fhkb:Person", "ORDER BY ?person\"\"\" ) person_query_results = g.query(q_details) for (subj, pred,", "{} q = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj", "fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn", "= sys.argv[1] except IndexError: sys.exit(\"No path defined!\") try: recursion_limit =", "{ ?person a fhkb:Person ; ?pred ?obj . 
} ORDER", "node['data'][dump(pred)] = dump(obj) elif pred.startswith(SCHEMA_ORG): if dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj)", "workpath = sys.argv[1] except IndexError: sys.exit(\"No path defined!\") try: recursion_limit", "graph = {} graph['nodes'] = [] graph['edges'] = [] nodes", "== 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred) == 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred)", "pred, obj) in person_query_results: node = nodes.get(dump(subj), { 'data': {", "fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf", "RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results = g.query(q, initBindings={'pred': pred})", "; ?pred ?obj . FILTER NOT EXISTS { ?person ?testPred", "dump(uriref): if uriref.__contains__('#'): return uriref.split('#')[-1] return uriref.split('/')[-1] graph = {}", "= rdflib.Namespace(schema_str) def dump(uriref): if uriref.__contains__('#'): return uriref.split('#')[-1] return uriref.split('/')[-1]", "{ ?person ?testPred ?obj . VALUES ?testPred { fhkb:isWifeOf fhkb:isMotherOf", ". 
VALUES ?testPred { fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent", "elif pred == rdflib.RDFS.label: node['data']['label'] = obj else: continue nodes[dump(subj)]", "fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } } }", "== 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred) == 'alternateName': node['data']['alternateNames'].append(obj) elif dump(pred)", "python3 import sys import json import rdflib import rdflib.plugins.sparql as", "{ 'data': { 'label': '', 'degree': 0, 'size': 10, 'alternateNames':", "try: workpath = sys.argv[1] except IndexError: sys.exit(\"No path defined!\") try:", "= sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj WHERE {", "0: sys.setrecursionlimit(recursion_limit) g = rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\"", "graph['edges'] = [] nodes = {} q = sparql.prepareQuery( \"\"\"PREFIX", "nodes = {} q = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person", "= \"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str) def dump(uriref):", "relation_query_results = g.query(q, initBindings={'pred': pred}) for (subj, pred, obj) in", "'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target': dump(obj), 'type': dump(pred) } })", "pred}) for (subj, pred, obj) in relation_query_results: graph['edges'].append( { 'data':", "person_query_results = g.query(q_details) for (subj, pred, obj) in person_query_results: node", "} } } ORDER BY ?person\"\"\" ) person_query_results = g.query(q_details)", "pred == FHKB.Sex: node['data'][dump(pred)] = dump(obj) elif pred.startswith(SCHEMA_ORG): if dump(pred)", "fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } } } ORDER BY 
?person\"\"\" ) person_query_results", "nodes.get(dump(subj), { 'data': { 'label': '', 'degree': 0, 'size': 10,", "= [] graph['edges'] = [] nodes = {} q =", "relation_query_results: graph['edges'].append( { 'data': { 'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source':", "'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target': dump(obj), 'type': dump(pred)", "fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } }", "obj) in person_query_results: node = nodes.get(dump(subj), { 'data': { 'label':", "if dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred) == 'honorificPrefix': node['data']['honorificPrefixes'].append(obj)", "rdflib.RDFS.label: node['data']['label'] = obj else: continue nodes[dump(subj)] = node graph['nodes']", "'honorificSuffixes': [], 'images': [], 'id': dump(subj), }}) if pred ==", "for (subj, pred, obj) in person_query_results: node = nodes.get(dump(subj), {", "'size': 10, 'alternateNames': [], 'honorificPrefixes': [], 'honorificSuffixes': [], 'images': [],", ") person_query_results = g.query(q_details) for (subj, pred, obj) in person_query_results:", "graph['edges'].append( { 'data': { 'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj),", "'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt',", "fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf", "rel)) relation_query_results = g.query(q, initBindings={'pred': pred}) for (subj, pred, obj)", "import sys import json import rdflib import rdflib.plugins.sparql as sparql", 
"sys.setrecursionlimit(recursion_limit) g = rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str", "fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } } } ORDER BY ?person\"\"\"", "rdflib.plugins.sparql as sparql RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf']", "RELS_TO_DRAW + RELS_TO_INFER try: workpath = sys.argv[1] except IndexError: sys.exit(\"No", "rdflib import rdflib.plugins.sparql as sparql RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf',", "recursion_limit = 0 if recursion_limit > 0: sys.setrecursionlimit(recursion_limit) g =", "= rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str) def dump(uriref): if uriref.__contains__('#'): return", "defined!\") try: recursion_limit = int(sys.argv[2]) except IndexError: recursion_limit = 0", "sparql RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER =", "uriref.split('#')[-1] return uriref.split('/')[-1] graph = {} graph['nodes'] = [] graph['edges']", "= {} q = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred", "return uriref.split('#')[-1] return uriref.split('/')[-1] graph = {} graph['nodes'] = []", "?pred ?obj WHERE { ?person a fhkb:Person ; ?pred ?obj", "RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent',", "return uriref.split('/')[-1] graph = {} graph['nodes'] = [] graph['edges'] =", "}) q_details = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj", "fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation", "'degree': 0, 'size': 10, 'alternateNames': [], 'honorificPrefixes': [], 'honorificSuffixes': [],", "= obj 
else: continue nodes[dump(subj)] = node graph['nodes'] = list(nodes.values())", "fhkb:Person ; ?pred ?obj . FILTER NOT EXISTS { ?person", "== FHKB.Sex: node['data'][dump(pred)] = dump(obj) elif pred.startswith(SCHEMA_ORG): if dump(pred) ==", "format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str)", "node['data']['honorificSuffixes'].append(obj) elif dump(pred) == 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred) == 'alternateName':", "sys import json import rdflib import rdflib.plugins.sparql as sparql RELS_TO_DRAW", "elif dump(pred) == 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred) == 'alternateName': node['data']['alternateNames'].append(obj)", "'type': dump(pred) } }) q_details = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT", "dump(pred) == 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred) == 'alternateName': node['data']['alternateNames'].append(obj) elif", "'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf']", "in RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results = g.query(q, initBindings={'pred':", "(subj, pred, obj) in person_query_results: node = nodes.get(dump(subj), { 'data':", "= dump(obj) elif pred.startswith(SCHEMA_ORG): if dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif", "pred == rdflib.RDFS.label: node['data']['label'] = obj else: continue nodes[dump(subj)] =", "RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle',", "'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred) == 'alternateName': 
node['data']['alternateNames'].append(obj) elif dump(pred) ==", "'', 'degree': 0, 'size': 10, 'alternateNames': [], 'honorificPrefixes': [], 'honorificSuffixes':", "g.query(q_details) for (subj, pred, obj) in person_query_results: node = nodes.get(dump(subj),", "= 0 if recursion_limit > 0: sys.setrecursionlimit(recursion_limit) g = rdflib.Graph()", "fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } } } ORDER BY ?person\"\"\" )", "if recursion_limit > 0: sys.setrecursionlimit(recursion_limit) g = rdflib.Graph() g.parse(workpath, format=\"turtle\")", "RELS_OF_INTEREST = RELS_TO_DRAW + RELS_TO_INFER try: workpath = sys.argv[1] except", "dump(pred) == 'image': node['data']['images'].append(obj) else: node['data'][dump(pred)] = obj elif pred", "continue nodes[dump(subj)] = node graph['nodes'] = list(nodes.values()) print(json.dumps(graph, indent=0)) sys.exit(0)", "dump(pred) } }) q_details = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person", "}}) if pred == FHKB.Sex: node['data'][dump(pred)] = dump(obj) elif pred.startswith(SCHEMA_ORG):", "node['data']['label'] = obj else: continue nodes[dump(subj)] = node graph['nodes'] =", "rdflib.Namespace(schema_str) def dump(uriref): if uriref.__contains__('#'): return uriref.split('#')[-1] return uriref.split('/')[-1] graph", ". FILTER NOT EXISTS { ?person ?testPred ?obj . VALUES", ". } ORDER BY ?person\"\"\") for rel in RELS_OF_INTEREST: pred", "RELS_TO_INFER try: workpath = sys.argv[1] except IndexError: sys.exit(\"No path defined!\")", "'data': { 'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target': dump(obj),", "a fhkb:Person ; ?pred ?obj . 
FILTER NOT EXISTS {", "'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target': dump(obj), 'type': dump(pred) }", "fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf", "[] nodes = {} q = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT", "= nodes.get(dump(subj), { 'data': { 'label': '', 'degree': 0, 'size':", "'image': node['data']['images'].append(obj) else: node['data'][dump(pred)] = obj elif pred == rdflib.RDFS.label:", "obj elif pred == rdflib.RDFS.label: node['data']['label'] = obj else: continue", "for rel in RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results =", "'target': dump(obj), 'type': dump(pred) } }) q_details = sparql.prepareQuery( \"\"\"PREFIX", "fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle", "fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf", "rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str) def dump(uriref): if uriref.__contains__('#'): return uriref.split('#')[-1]", "'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW + RELS_TO_INFER try: workpath = sys.argv[1]", "pred, obj) in relation_query_results: graph['edges'].append( { 'data': { 'group': 'edges',", "} ORDER BY ?person\"\"\" ) person_query_results = g.query(q_details) for (subj,", "?obj . 
} ORDER BY ?person\"\"\") for rel in RELS_OF_INTEREST:", "dump(subj), 'target': dump(obj), 'type': dump(pred) } }) q_details = sparql.prepareQuery(", "in person_query_results: node = nodes.get(dump(subj), { 'data': { 'label': '',", "elif dump(pred) == 'alternateName': node['data']['alternateNames'].append(obj) elif dump(pred) == 'image': node['data']['images'].append(obj)", "g.parse(workpath, format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\" FHKB =", "= g.query(q, initBindings={'pred': pred}) for (subj, pred, obj) in relation_query_results:", "'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred) == 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif dump(pred) ==", "BY ?person\"\"\" ) person_query_results = g.query(q_details) for (subj, pred, obj)", "'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST", "FILTER NOT EXISTS { ?person ?testPred ?obj . 
VALUES ?testPred", "dump(obj) elif pred.startswith(SCHEMA_ORG): if dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred)", "'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW + RELS_TO_INFER try: workpath", "elif dump(pred) == 'image': node['data']['images'].append(obj) else: node['data'][dump(pred)] = obj elif", "0 if recursion_limit > 0: sys.setrecursionlimit(recursion_limit) g = rdflib.Graph() g.parse(workpath,", "path defined!\") try: recursion_limit = int(sys.argv[2]) except IndexError: recursion_limit =", "g = rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str =", "node['data']['alternateNames'].append(obj) elif dump(pred) == 'image': node['data']['images'].append(obj) else: node['data'][dump(pred)] = obj", "sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj WHERE { ?person", "{ 'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target': dump(obj), 'type':", "0, 'size': 10, 'alternateNames': [], 'honorificPrefixes': [], 'honorificSuffixes': [], 'images':", "?person\"\"\") for rel in RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results", "rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results = g.query(q, initBindings={'pred': pred}) for (subj, pred,", "try: recursion_limit = int(sys.argv[2]) except IndexError: recursion_limit = 0 if", "?person a fhkb:Person ; ?pred ?obj . 
} ORDER BY", "recursion_limit > 0: sys.setrecursionlimit(recursion_limit) g = rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str", "?testPred { fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent", "= {} graph['nodes'] = [] graph['edges'] = [] nodes =", "def dump(uriref): if uriref.__contains__('#'): return uriref.split('#')[-1] return uriref.split('/')[-1] graph =", "except IndexError: sys.exit(\"No path defined!\") try: recursion_limit = int(sys.argv[2]) except", "g.query(q, initBindings={'pred': pred}) for (subj, pred, obj) in relation_query_results: graph['edges'].append(", "= ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf',", "rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\" FHKB", "recursion_limit = int(sys.argv[2]) except IndexError: recursion_limit = 0 if recursion_limit", "{ 'data': { 'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target':", "['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent',", "'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf',", "uriref.split('/')[-1] graph = {} graph['nodes'] = [] graph['edges'] = []", "q = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj WHERE", "'source': dump(subj), 'target': dump(obj), 'type': dump(pred) } }) q_details =", "fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf", "?person ?testPred ?obj . 
VALUES ?testPred { fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf", "ORDER BY ?person\"\"\") for rel in RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str,", "pred.startswith(SCHEMA_ORG): if dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred) == 'honorificPrefix':", "'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf',", "; ?pred ?obj . } ORDER BY ?person\"\"\") for rel", "} } ORDER BY ?person\"\"\" ) person_query_results = g.query(q_details) for", "node['data']['honorificPrefixes'].append(obj) elif dump(pred) == 'alternateName': node['data']['alternateNames'].append(obj) elif dump(pred) == 'image':", "WHERE { ?person a fhkb:Person ; ?pred ?obj . }", "elif pred.startswith(SCHEMA_ORG): if dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred) ==", "for (subj, pred, obj) in relation_query_results: graph['edges'].append( { 'data': {", "in relation_query_results: graph['edges'].append( { 'data': { 'group': 'edges', 'id': f'{dump(subj)}-{dump(pred)}-{dump(obj)}',", "?testPred ?obj . 
VALUES ?testPred { fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf", "['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt',", "'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST =", "pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results = g.query(q, initBindings={'pred': pred}) for", "= rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results = g.query(q, initBindings={'pred': pred}) for (subj,", "'alternateName': node['data']['alternateNames'].append(obj) elif dump(pred) == 'image': node['data']['images'].append(obj) else: node['data'][dump(pred)] =", "else: node['data'][dump(pred)] = obj elif pred == rdflib.RDFS.label: node['data']['label'] =", "dump(pred) == 'alternateName': node['data']['alternateNames'].append(obj) elif dump(pred) == 'image': node['data']['images'].append(obj) else:", "'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW", "'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW +", "?pred ?obj . 
FILTER NOT EXISTS { ?person ?testPred ?obj", "'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf',", "IndexError: recursion_limit = 0 if recursion_limit > 0: sys.setrecursionlimit(recursion_limit) g", "> 0: sys.setrecursionlimit(recursion_limit) g = rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str =", "'images': [], 'id': dump(subj), }}) if pred == FHKB.Sex: node['data'][dump(pred)]", "= RELS_TO_DRAW + RELS_TO_INFER try: workpath = sys.argv[1] except IndexError:", "fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf", "== 'alternateName': node['data']['alternateNames'].append(obj) elif dump(pred) == 'image': node['data']['images'].append(obj) else: node['data'][dump(pred)]", "import json import rdflib import rdflib.plugins.sparql as sparql RELS_TO_DRAW =", "a fhkb:Person ; ?pred ?obj . } ORDER BY ?person\"\"\")", "fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt", "?obj . 
FILTER NOT EXISTS { ?person ?testPred ?obj .", "= \"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG =", "[], 'images': [], 'id': dump(subj), }}) if pred == FHKB.Sex:", "[], 'id': dump(subj), }}) if pred == FHKB.Sex: node['data'][dump(pred)] =", "fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } } } ORDER BY", "int(sys.argv[2]) except IndexError: recursion_limit = 0 if recursion_limit > 0:", "'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW + RELS_TO_INFER try: workpath =", "fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle", "= int(sys.argv[2]) except IndexError: recursion_limit = 0 if recursion_limit >", "?obj WHERE { ?person a fhkb:Person ; ?pred ?obj .", "BY ?person\"\"\") for rel in RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel))", "VALUES ?testPred { fhkb:isWifeOf fhkb:isMotherOf fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf", "\"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str) def dump(uriref): if", "NOT EXISTS { ?person ?testPred ?obj . 
VALUES ?testPred {", "sys.argv[1] except IndexError: sys.exit(\"No path defined!\") try: recursion_limit = int(sys.argv[2])", "'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf', 'hasUncle',", "(subj, pred, obj) in relation_query_results: graph['edges'].append( { 'data': { 'group':", "uriref.__contains__('#'): return uriref.split('#')[-1] return uriref.split('/')[-1] graph = {} graph['nodes'] =", "fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf fhkb:hasAunt fhkb:isGreatAuntOf", "'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf', 'isThirdCousinOf'] RELS_OF_INTEREST = RELS_TO_DRAW + RELS_TO_INFER try:", "EXISTS { ?person ?testPred ?obj . VALUES ?testPred { fhkb:isWifeOf", "== 'image': node['data']['images'].append(obj) else: node['data'][dump(pred)] = obj elif pred ==", "= g.query(q_details) for (subj, pred, obj) in person_query_results: node =", "if pred == FHKB.Sex: node['data'][dump(pred)] = dump(obj) elif pred.startswith(SCHEMA_ORG): if", "{ ?person a fhkb:Person ; ?pred ?obj . FILTER NOT", "node['data'][dump(pred)] = obj elif pred == rdflib.RDFS.label: node['data']['label'] = obj", "schema_str = \"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str) def", "fhkb:Person ; ?pred ?obj . 
} ORDER BY ?person\"\"\") for", "initBindings={'pred': pred}) for (subj, pred, obj) in relation_query_results: graph['edges'].append( {", "\"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str)", "fhkb:isGreatAuntOf fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn", "fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf", "obj else: continue nodes[dump(subj)] = node graph['nodes'] = list(nodes.values()) print(json.dumps(graph,", "q_details = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj WHERE", "'alternateNames': [], 'honorificPrefixes': [], 'honorificSuffixes': [], 'images': [], 'id': dump(subj),", "else: continue nodes[dump(subj)] = node graph['nodes'] = list(nodes.values()) print(json.dumps(graph, indent=0))", "IndexError: sys.exit(\"No path defined!\") try: recursion_limit = int(sys.argv[2]) except IndexError:", "rel in RELS_OF_INTEREST: pred = rdflib.URIRef(\"{}{}\".format(fhkb_str, rel)) relation_query_results = g.query(q,", "fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn fhkb:isFemalePartnerIn fhkb:isBloodrelationOf } } } ORDER", "except IndexError: recursion_limit = 0 if recursion_limit > 0: sys.setrecursionlimit(recursion_limit)", "} ORDER BY ?person\"\"\") for rel in RELS_OF_INTEREST: pred =", "[], 'honorificSuffixes': [], 'images': [], 'id': dump(subj), }}) if pred", "sys.exit(\"No path defined!\") try: recursion_limit = int(sys.argv[2]) except IndexError: recursion_limit", "fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\" FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG", 
"json import rdflib import rdflib.plugins.sparql as sparql RELS_TO_DRAW = ['isWifeOf',", "SELECT ?person ?pred ?obj WHERE { ?person a fhkb:Person ;", "'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf',", "?person a fhkb:Person ; ?pred ?obj . FILTER NOT EXISTS", "fhkb:hasGreatAunt fhkb:isBrotherOf fhkb:isSisterOf fhkb:isSiblingOf fhkb:isFirstCousinOf fhkb:isSecondCousinOf fhkb:isThirdCousinOf fhkb:hasRelation fhkb:isPartnerIn fhkb:isMalePartnerIn", "dump(subj), }}) if pred == FHKB.Sex: node['data'][dump(pred)] = dump(obj) elif", "+ RELS_TO_INFER try: workpath = sys.argv[1] except IndexError: sys.exit(\"No path", "f'{dump(subj)}-{dump(pred)}-{dump(obj)}', 'source': dump(subj), 'target': dump(obj), 'type': dump(pred) } }) q_details", "FHKB = rdflib.Namespace(fhkb_str) SCHEMA_ORG = rdflib.Namespace(schema_str) def dump(uriref): if uriref.__contains__('#'):", "node = nodes.get(dump(subj), { 'data': { 'label': '', 'degree': 0,", "'isGreatGrandParentOf', 'isUncleOf', 'hasUncle', 'isGreatUncleOf', 'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf',", "?person\"\"\" ) person_query_results = g.query(q_details) for (subj, pred, obj) in", "person_query_results: node = nodes.get(dump(subj), { 'data': { 'label': '', 'degree':", "obj) in relation_query_results: graph['edges'].append( { 'data': { 'group': 'edges', 'id':", "FHKB.Sex: node['data'][dump(pred)] = dump(obj) elif pred.startswith(SCHEMA_ORG): if dump(pred) == 'honorificSuffix':", "dump(obj), 'type': dump(pred) } }) q_details = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#>", "import rdflib.plugins.sparql as sparql RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf',", "'data': { 'label': '', 'degree': 0, 'size': 10, 'alternateNames': [],", "[] graph['edges'] = [] nodes = {} q = sparql.prepareQuery(", "fhkb:isSpouseOf fhkb:hasGrandParent 
fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf fhkb:hasGreatUncle fhkb:isAuntOf", "fhkb:isFatherOf fhkb:isHusbandOf fhkb:isSpouseOf fhkb:hasGrandParent fhkb:isGrandParentOf fhkb:hasGreatGrandParent fhkb:isGreatGrandParentOf fhkb:isUncleOf fhkb:hasUncle fhkb:isGreatUncleOf", "'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf',", "== rdflib.RDFS.label: node['data']['label'] = obj else: continue nodes[dump(subj)] = node", "#!/usr/bin/env python3 import sys import json import rdflib import rdflib.plugins.sparql", "import rdflib import rdflib.plugins.sparql as sparql RELS_TO_DRAW = ['isWifeOf', 'isMotherOf',", "'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf', 'hasGreatGrandParent', 'isGreatGrandParentOf', 'isUncleOf',", "= ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER = ['hasGrandParent', 'isGrandParentOf',", "10, 'alternateNames': [], 'honorificPrefixes': [], 'honorificSuffixes': [], 'images': [], 'id':", "= rdflib.Graph() g.parse(workpath, format=\"turtle\") fhkb_str = \"http://www.example.com/genealogy.owl#\" schema_str = \"https://schema.org/\"", "'hasGreatUncle', 'isAuntOf', 'hasAunt', 'isGreatAuntOf', 'hasGreatAunt', 'isBrotherOf', 'isSisterOf', 'isSiblingOf', 'isFirstCousinOf', 'isSecondCousinOf',", "\"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred ?obj WHERE { ?person a", "fhkb:isBloodrelationOf } } } ORDER BY ?person\"\"\" ) person_query_results =", "as sparql RELS_TO_DRAW = ['isWifeOf', 'isMotherOf', 'isFatherOf', 'isHusbandOf', 'isSpouseOf'] RELS_TO_INFER", "} }) q_details = sparql.prepareQuery( \"\"\"PREFIX fhkb:<http://www.example.com/genealogy.owl#> SELECT ?person ?pred", "dump(pred) == 'honorificSuffix': node['data']['honorificSuffixes'].append(obj) elif dump(pred) 
== 'honorificPrefix': node['data']['honorificPrefixes'].append(obj) elif", "[], 'honorificPrefixes': [], 'honorificSuffixes': [], 'images': [], 'id': dump(subj), }})", "WHERE { ?person a fhkb:Person ; ?pred ?obj . FILTER", "if uriref.__contains__('#'): return uriref.split('#')[-1] return uriref.split('/')[-1] graph = {} graph['nodes']", "?person ?pred ?obj WHERE { ?person a fhkb:Person ; ?pred" ]
[ "\"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An", "\"\"\"An unknown IdP should cause a 400\"\"\" channel = self.make_request(", "ApplicationService.NS_ALIASES: [], }, ) self.another_service = ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\",", "-> FakeChannel: \"\"\"Send a request to /_matrix/client/r0/login/sso/redirect ... possibly specifying", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed:", "\"CAS\"}, {\"id\": \"saml\", \"name\": \"SAML\"}, {\"id\": \"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\":", "{\"id\": \"oidc\", \"name\": \"OIDC\"}, ], ) def test_multi_sso_redirect(self) -> None:", "self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url =", "2.0 (the \"License\"); # you may not use this file", "\"loginToken\") # finally, submit the matrix login token to the", "self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri", "whitelist this client URI so we redirect straight to it", "{**base_config, \"issuer\": \"test-issuer\"}}) def test_login_iss(self) -> None: \"\"\"Test validating the", "+ \"&idp=saml\", ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "Matrix.org Foundation C.I.C. 
# # Licensed under the Apache License,", "check it has fields assumed elsewhere in this class html", "with wrong as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\":", "when using the appservice login method \"\"\" self.register_appservice_user(AS_USER, self.service.token) params", "\"password\": password, \"session\": channel.json_body[\"session\"], } channel = self.make_request( b\"DELETE\", \"devices/\"", "} ) def test_POST_ratelimiting_per_address(self) -> None: # Create different users", "+ session_id), # old versions of twisted don't do form-parsing", "the returned location matches the requested redirect URL path, query", "+ \" = \" for caveat in macaroon.caveats: if caveat.caveat_id.startswith(prefix):", "Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"] = {}", "ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES:", "use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\":", "return self.hs @override_config( { \"rc_login\": { \"address\": {\"per_second\": 0.17, \"burst_count\":", "overriding the entire # rc_login dict here, we need to", "the completion page content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan = self.make_request(", "self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def", "do form-parsing without a valid # content-length header. 
(\"Content-Length\", str(len(content))),", "on xmlsec1 \"crypto_backend\": \"XMLSecurity\", }, } # default OIDC provider", "\"-----BEGIN PUBLIC KEY-----\", \"<KEY>\", \"<KEY> \"-----END PUBLIC KEY-----\", ] )", "\"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, } channel = self.make_request( b\"POST\",", "endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str)", "Now try to hard log out all of the user's", "channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\",", "limitations under the License. import json import time import urllib.parse", "= self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid_registered(self) -> None:", "def test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If the client tries to pick", "location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url = location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\")", "device id. 
chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\":", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self) -> None: channel", "reactor: MemoryReactor, clock: Clock) -> HomeServer: self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path", "login_token = params[2][1] chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\",", "cas_user_id async def get_raw(uri: str, args: Any) -> bytes: \"\"\"Return", "self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0) params = { \"type\":", "be a proper logout self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id) channel =", "\"audiences\": [\"test-audience\"]}}) def test_login_aud(self) -> None: \"\"\"Test validating the audience", "+= \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)],", "test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\",", "where we instead use # RSS256, with a public key", "d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_get_login_flows(self) -> None:", "self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}}) def test_login_aud(self) -> None:", "we now log in as a different device access_token_2 =", "} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config(", "= self.make_request(\"GET\", cas_ticket_url) # Because the user is deactivated they", "\"\"\"Tests for the username 
picker flow of SSO login\"\"\" servlets", "to the completion page content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan =", "self.setup_test_homeserver() self.service = ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS:", "channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) # Since", "@override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\", \"monkey\") # log", "} return config def jwt_encode(self, payload: Dict[str, Any], secret: str", "and # limitations under the License. import json import time", "specifying an IDP provider \"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\" if idp_prov", "hitting that picker should give us some HTML channel =", "test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If CAS is chosen, should redirect to", "and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers with multiple SSO", "200, channel.result) # time passes self.reactor.advance(24 * 3600) # ...", "url with some annoying characters in. %3D is =, %26", "\"{{ user.displayname }}\"} } # whitelist this client URI so", "License for the specific language governing permissions and # limitations", "An invalid issuer. 
channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"],", "+ [f[\"type\"] for f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for f", "{\"type\": \"m.id.user\", \"user\": \"kermit\"}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\",", "retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) # Since we're", "\"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\": {\"localpart_template\":", "512` (not a secure way to generate real keys, but", "\"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE KEY-----\", ] )", "(eg via @override_config), don't replace it. if config.get(\"jwt_config\") is None:", "urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan = self.make_request( \"POST\", path=picker_url, content=content, content_is_form=True, custom_headers=[", "a valid # content-length header. 
(\"Content-Length\", str(len(content))), ], ) self.assertEqual(chan.code,", "Any]: config = super().default_config() # If jwt_config has been defined", "that the redirectUrl is correctly encoded in the service param", "self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"})", "params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\"", "login fails with the correct error code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"],", "d def test_username_picker(self) -> None: \"\"\"Test the happy path of", "for i in range(0, 6): params = { \"type\": \"m.login.password\",", "login flow serves a confirmation page before redirecting a user", "r\"@as2_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, )", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self) -> None: params =", "longer than 512 characters device_id = \"yolo\" * 512 body", "self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result) expected_flow_types = [ \"m.login.cas\", \"m.login.sso\",", "LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\" # a (valid) url", "\"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code, 302,", "should have set a cookie including the redirect url cookie_headers", "and serve the template. 
channel = self.make_request(\"GET\", cas_ticket_url) # Because", "jwt HAS_JWT = True except ImportError: HAS_JWT = False #", "expired\" ) def test_login_jwt_not_before(self) -> None: now = int(time.time()) channel", "flow serves a confirmation page before redirecting a user to", "= jwt_privatekey) -> str: # PyJWT 2.0.0 changed the return", "def test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If CAS is chosen, should redirect", "mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw self.hs = self.setup_test_homeserver( config=config,", "bytes to str. result: Union[bytes, str] = jwt.encode(payload, secret, \"RS256\")", "= urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0],", "\"secret\": jwt_secret, \"algorithm\": jwt_algorithm, } def default_config(self) -> Dict[str, Any]:", "for i in range(0, 6): self.register_user(\"kermit\" + str(i), \"monkey\") for", "log in as a different device access_token_2 = self.login(\"kermit\", \"monkey\")", "a username picker flow.\"\"\" # do the start of the", "\"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard logout this", "providers enabled\"\"\" servlets = [ login.register_servlets, ] def default_config(self) ->", "\"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\", } return config def jwt_encode(self, payload:", "redirect link self.assertEqual(len(p.links), 1) path, query = p.links[0].split(\"?\", 1) self.assertEqual(path,", "self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If", "-> None: self.register_user(\"kermit\", \"monkey\") for i in range(0, 6): params", "-> None: \"\"\"If the client pick a known IdP, redirect", "10000}, } } ) def 
test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\", \"monkey\")", "params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_wrong_as(self) -> None:", "import Clock from tests import unittest from tests.handlers.test_oidc import HAS_OIDC", "=+\"', '\"fö&=o\"')] # (possibly experimental) login flows we expect to", "self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid_registered(self)", "import Any, Dict, List, Optional, Union from unittest.mock import Mock", "self.hs.config.captcha.enable_registration_captcha = False return self.hs @override_config( { \"rc_login\": { \"address\":", "jwt_secret setting of synapse. Valid # tokens are signed by", "channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\",", "\"POST\", path=picker_url, content=content, content_is_form=True, custom_headers=[ (\"Cookie\", \"username_mapping_session=\" + session_id), #", "self.reactor.advance(3600) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "should error.\"\"\" redirect_url = \"https://legit-site.com/\" # First login (to create", "\"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\" if idp_prov is not None: endpoint", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_no_token(self) -> None: \"\"\"Test that users", "a user to the redirect URL. 
\"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\"", "test_login_jwt_valid_unregistered(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "@override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self) -> None: \"\"\"Logging in as", ") def default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"jwt_config\"]", "that an appservice user can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params", "bytes: \"\"\"Return an example response payload from a call to", "\"enabled\": True, \"server_url\": CAS_SERVER, } cas_user_id = \"username\" self.user_id =", "EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # finally, submit the matrix login token", "should redirect to an identity picker\"\"\" # first hit the", "uri = location_headers[0] # hitting that picker should give us", "from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server", "a device_id longer than 512 characters device_id = \"yolo\" *", "should redirect to the OIDC auth endpoint\"\"\" # pick the", "(to create the user). self._test_redirect(redirect_url) # Deactivate the account. 
self.get_success(", "ADDITIONAL_LOGIN_FLOWS = [ {\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase):", "login page of the cas server self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\")", "urllib.parse.urlunparse(url_parts) # Get Synapse to call the fake CAS and", "token when using the appservice login method \"\"\" self.register_appservice_user(AS_USER, self.service.token)", "channel.result) def test_login_with_overly_long_device_id_fails(self) -> None: self.register_user(\"mickey\", \"<PASSWORD>\") # create a", "# Now try to hard log out all of the", "# check that the redirectUrl is correctly encoded in the", "\"<PASSWORD>\", \"device_id\": device_id, } # make a login request with", "errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # finally, submit the", "self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid(self) -> None: channel", "id not found in map\", ) session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id,", "= urllib.parse.urlunparse(url_parts) # Get Synapse to call the fake CAS", "default OIDC provider config[\"oidc_config\"] = TEST_OIDC_CONFIG # additional OIDC providers", "None: \"\"\"Logging in as a deactivated account should error.\"\"\" redirect_url", "tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" # public_baseurl for some tests. 
It", "self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "\"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ]", "channel.result.get(\"headers\", []): if header[0] == b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\"))", "minutes away expected_expiry = self.clock.time_msec() + (15 * 60 *", "need to set this manually as well \"account\": {\"per_second\": 10000,", "it should redirect us to the login page of the", "in macaroon.caveats: if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :] raise ValueError(\"No %s", "channel = self.make_request(b\"POST\", LOGIN_URL, params) if i == 5: self.assertEqual(channel.result[\"code\"],", "register from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from", "# This key is used to sign tokens that shouldn't", "OF ANY KIND, either express or implied. 
# See the", "TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov: Optional[str] = None) -> FakeChannel: \"\"\"Send", "\"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid", "See the License for the specific language governing permissions and", "self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\", \"\"), ('q\" =+\"', '\"fö&=o\"')] # (possibly", "username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the expiry", "result: Union[str, bytes] = jwt.encode(payload, secret, self.jwt_algorithm) if isinstance(result, bytes):", "LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": { #", "= self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "HomeserverTestCase, override_config, skip_unless try: import jwt HAS_JWT = True except", "is &, %2B is + TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' #", "\"sp_config\": { \"metadata\": {\"inline\": [TEST_SAML_METADATA]}, # use the XMLSecurity backend", "to in writing, software # distributed under the License is", "self.make_request(\"GET\", cas_ticket_url) # Because the user is deactivated they are", "'JWT validation failed: Token is missing the \"iss\" claim', )", "jwt_login(self, *args: Any) -> FakeChannel: params = {\"type\": \"org.matrix.login.jwt\", \"token\":", "TEST_URL = b\"/_matrix/client/r0/account/whoami\" # a (valid) url with some 
annoying", "awaits on it. \"\"\" return ( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess>", "just like jwt_privatekey. bad_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE", "assert location_headers # send a request to the completion page,", "to cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0] _, service_uri_query =", "url, which should redirect to our idp picker channel =", "or agreed to in writing, software # distributed under the", "self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri =", "about 15 minutes away expected_expiry = self.clock.time_msec() + (15 *", "self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "served an error template. self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"])", "b\"403\", channel.result) def test_login_appservice_wrong_as(self) -> None: \"\"\"Test that as users", "config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\": {\"display_name_template\": \"{{", "should redirect us to the login page of the cas", "can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE,", "a confirmation page self.assertEqual(channel.code, 200, channel.result) content_type_headers = channel.headers.getRawHeaders(\"Content-Type\") assert", "experimental) login flows we expect to appear in the list", "TestHtmlParser() p.feed(channel.text_body) p.close() # ... 
which should contain our redirect", "= list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4]", "1000.0) params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\":", "https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to be returned by an async function", "+ urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code, 302, channel.result) location_headers", "Generated with `openssl rsa -in foo.key -pubout`, with the the", "\"burst_count\": 5}, # Prevent the account login ratelimiter from raising", "\"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\": {\"localpart_template\": \"{{ user.sub", "Clock, hs: HomeServer) -> None: self.deactivate_account_handler = hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self)", "None: endpoint += \"/\" + idp_prov endpoint += \"?redirectUrl=\" +", "config[\"oidc_providers\"] = [ { \"idp_id\": \"idp1\", \"idp_name\": \"IDP1\", \"discover\": False,", "404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If the", "different users so we're sure not to be bothered by", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_user_bot(self) -> None: \"\"\"Test that the", "our idp picker channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result) location_headers", "\"XMLSecurity\", }, } # default OIDC provider config[\"oidc_config\"] = TEST_OIDC_CONFIG", "<cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> 
<cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess>", "compliance with the License. # You may obtain a copy", "\"user\": AS_USER}, } channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.another_service.token", "b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_user_bot(self)", "tokens # signed by the private key. @skip_unless(HAS_JWT, \"requires jwt\")", "10000, \"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_address(self) -> None:", "the given redirect URL.\"\"\" cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url))", "\"monkey\") # we shouldn't be able to make requests without", "of SSO login\"\"\" servlets = [login.register_servlets] def default_config(self) -> Dict[str,", "governing permissions and # limitations under the License. import json", "it's a UI-Auth fail self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"}, channel.result,", "location_headers[0] oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\", 1) # it should redirect", "= True except ImportError: HAS_JWT = False # synapse server", "\"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA PRIVATE KEY-----\", ] ) #", "\"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation", "on it. 
\"\"\" return ( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user>", "params) return channel def test_login_jwt_valid(self) -> None: channel = self.jwt_login({\"sub\":", "redirect us to the auth page of the OIDC server", "not None: endpoint += \"/\" + idp_prov endpoint += \"?redirectUrl=\"", "= jwt.encode(payload, secret, self.jwt_algorithm) if isinstance(result, bytes): return result.decode(\"ascii\") return", "make requests with the access token channel = self.make_request(b\"GET\", TEST_URL,", "\"<KEY>\", \"<KEY> \"-----END PUBLIC KEY-----\", ] ) # This key", "additional OIDC providers config[\"oidc_providers\"] = [ { \"idp_id\": \"idp1\", \"idp_name\":", "None: channel = self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "# https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\": \"m.id.user\", \"user\": user_id}, \"user\": user_id,", "hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) -> None: \"\"\"Tests that the SSO login", "channel.result) @override_config( { \"rc_login\": { # Prevent the address login", "\"@kermit:test\") # An invalid issuer. channel = self.jwt_login({\"sub\": \"kermit\", \"iss\":", "{ \"rc_login\": { \"address\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent", "and device id. 
login_token = params[2][1] chan = self.make_request( \"POST\",", "secret: str = jwt_privatekey) -> str: # PyJWT 2.0.0 changed", "not use this file except in compliance with the License.", "self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}}) def test_login_custom_sub(self) -> None:", "must provide a token when using the appservice login method", "self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device( self, access_token: str, user_id:", "jwt.encode(payload, secret, \"RS256\") if isinstance(result, bytes): return result.decode(\"ascii\") return result", "the list after the normal # ones ADDITIONAL_LOGIN_FLOWS = [", "= False return self.hs @override_config( { \"rc_login\": { \"address\": {\"per_second\":", "= channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser() p.feed(html) p.close() # there should", "self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url: str) -> None: \"\"\"Tests that the", "\"monkey\") # log in as normal access_token = self.login(\"kermit\", \"monkey\")", "you may not use this file except in compliance with", "] ) # Generated with `openssl rsa -in foo.key -pubout`,", "\"\"\"If the client tries to pick an unknown IdP, return", "\"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self) -> None: self.register_user(\"mickey\",", "\"m.login.password\", ] + [f[\"type\"] for f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"]", "TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query params in TEST_CLIENT_REDIRECT_URL", "# signed by the private key. 
@skip_unless(HAS_JWT, \"requires jwt\") class", "def create_resource_dict(self) -> Dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return", "flows = {flow[\"type\"]: flow for flow in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"],", "test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\",", "device_id, access_token=access_token ) self.assertEqual(channel.code, 401, channel.result) # check it's a", "soft-logout self.reactor.advance(3600) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result)", "self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") #", "= \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\",", "def test_login_appservice_no_token(self) -> None: \"\"\"Test that users must provide a", "\"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, register.register_servlets, ] def", "An invalid audience. 
channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"],", "# serving a confirmation page config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]} return", "Dict[str, Any]: config = super().default_config() config[\"jwt_config\"] = { \"enabled\": True,", "we're ratelimiting at 1 request/min, retry_after_ms should be lower #", "-> FakeChannel: params = {\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel =", "channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri = location_headers[0] cas_uri_path,", "BASE_URL config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\",", "RSA PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\",", "self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"}) # that should serve a confirmation", "some tests CAS_SERVER = \"https://fake.test\" # just enough to tell", "has expired\" ) def test_login_jwt_not_before(self) -> None: now = int(time.time())", "\"address\": {\"per_second\": 10000, \"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_account(self)", "a cookie including the redirect url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert", "= self.setup_test_homeserver() self.service = ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={", "confirmation page before redirecting a user to the redirect URL.", "\"IDP1\", \"discover\": 
False, \"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\":", "# matrix access token, mxid, and device id. chan =", "\"iss\" claim', ) def test_login_iss_no_config(self) -> None: \"\"\"Test providing an", "b\"200\", channel.result) @override_config( { \"rc_login\": { \"account\": {\"per_second\": 0.17, \"burst_count\":", "error code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC, \"Requires", "\"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field for JWT is missing\") # The", "by synapse. # Generated just like jwt_privatekey. bad_privatekey = \"\\n\".join(", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "{ \"metadata\": {\"inline\": [TEST_SAML_METADATA]}, # use the XMLSecurity backend to", "cas_user_id = \"username\" self.user_id = \"@%s:test\" % cas_user_id async def", "query = p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will have", "validation failed: Token is missing the \"aud\" claim', ) def", "of a username picker flow.\"\"\" # do the start of", "failed: Invalid audience\" ) def test_login_default_sub(self) -> None: \"\"\"Test reading", "correct error code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC,", "audience\" ) # Not providing an audience. channel = self.jwt_login({\"sub\":", "user_id: str, password: str, device_id: str ) -> None: \"\"\"Perform", "be lower # than 1min. 
self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms /", "expected_expiry = self.clock.time_msec() + (15 * 60 * 1000) self.assertApproximates(session.expiry_time_ms,", "jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] # This", "\"m.id.user\", \"user\": \"kermit\"}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL,", "chan = self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)], )", "\" for caveat in macaroon.caveats: if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :]", "so we're sure not to be bothered by the per-user", "from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource import synapse.rest.admin", "\"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\":", "<cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse> \"\"\"", "self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self) ->", "= \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor>", "c>\", \"\"), ('q\" =+\"', '\"fö&=o\"')] # (possibly experimental) 
login flows", "JWT is missing\") # The JWTPubKeyTestCase is a complement to", "page content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan = self.make_request( \"POST\", path=picker_url,", "mocked_http_client.get_raw.side_effect = get_raw self.hs = self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, ) return", "\"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\", \"monkey\") # log in", "channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers uri = location_headers[0] #", "# https://.... BASE_URL = \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server", "URI so we redirect straight to it rather than #", "= super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_get_login_flows(self) -> None: \"\"\"GET", "TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) #", "that the SSO login flow serves a confirmation page before", "of jwt.encode from bytes to str. result: Union[str, bytes] =", "is correctly encoded in the service param - ie, the", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid audience. 
channel = self.jwt_login({\"sub\":", "If jwt_config has been defined (eg via @override_config), don't replace", "-> None: self.register_user(\"kermit\", \"monkey\") channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "channel.result) @override_config( { \"rc_login\": { \"account\": {\"per_second\": 0.17, \"burst_count\": 5},", "ratelimiting at 1 request/min, retry_after_ms should be lower # than", "self.jwt_pubkey, \"algorithm\": \"RS256\", } return config def jwt_encode(self, payload: Dict[str,", "tests.server import FakeChannel from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import", "None: \"\"\"Test that users must provide a token when using", "{\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the account login ratelimiter", "@skip_unless(has_saml2 and HAS_OIDC, \"Requires SAML2 and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests", "the expiry time should be about 15 minutes away expected_expiry", "\"monkey\", device_id) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result)", "caveat in macaroon\" % (key,)) class CASTestCase(unittest.HomeserverTestCase): servlets = [", "the appservice login method \"\"\" self.register_appservice_user(AS_USER, self.service.token) params = {", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self) -> None: channel", "\"-----END RSA PRIVATE KEY-----\", ] ) def default_config(self) -> Dict[str,", "OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the username picker flow of", "params) if i == 5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms =", "\"\"\"Test that as users cannot login with wrong as token\"\"\"", "that users must provide a token when using the 
appservice", "test_login_aud_no_config(self) -> None: \"\"\"Test providing an audience without requiring it", "True, \"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\", } return config def jwt_encode(self,", "# hitting that picker should give us some HTML channel", "\"user\": \"fibble_wibble\"}, } channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token", "endpoint\"\"\" # pick the default OIDC provider channel = self.make_request(", "self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ... and should have set a cookie", "with the correct error code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2", "400, channel.result) def test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If the client tries", "\"\"\"Test that an appservice user can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token)", "\"\"\"Test that the appservice bot can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token)", "FakeChannel: \"\"\"Send a request to /_matrix/client/r0/login/sso/redirect ... possibly specifying an", "server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ... and should have set a", "True, \"server_url\": CAS_SERVER, } cas_user_id = \"username\" self.user_id = \"@%s:test\"", "enough for tests!) 
jwt_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE", "key's pubkey is used as the jwt_secret setting of synapse.", "i in range(0, 6): self.register_user(\"kermit\" + str(i), \"monkey\") for i", "JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] # This key's pubkey", "return channel def test_login_jwt_valid(self) -> None: channel = self.jwt_login({\"sub\": \"kermit\"})", "<cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse> \"\"\" %", "-> None: channel = self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", ") def test_login_aud_no_config(self) -> None: \"\"\"Test providing an audience without", "% (urllib.parse.quote(redirect_url)) ) # Get Synapse to call the fake", "be a link for each href returned_idps: List[str] = []", "provide a token when using the appservice login method \"\"\"", "MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.deactivate_account_handler = hs.get_deactivate_account_handler()", "SSO flows\"\"\" channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result) expected_flow_types", "assert location_headers cas_uri = location_headers[0] cas_uri_path, cas_uri_query = cas_uri.split(\"?\", 1)", "make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.base_url = \"https://matrix.goodserver.com/\"", "self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def", "\"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # ... 
but if we delete that", "= self.login(\"kermit\", \"monkey\") # more requests with the expired token", "return password and SSO flows\"\"\" channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code,", "class JWTTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret =", "body isn't empty. self.assertTrue(len(channel.result[\"body\"]) > 0) # And that it", "self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self)", "the # place that CAS will redirect to cas_uri_params =", "in map\", ) session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\")", "# Now try to hard logout this session channel =", "for header in channel.result.get(\"headers\", []): if header[0] == b\"Content-Type\": content_type_header_value", "scheme because # FakeChannel.isSecure() returns False, so synapse will see", "self.hs @override_config( { \"rc_login\": { \"address\": {\"per_second\": 0.17, \"burst_count\": 5},", "to str. result: Union[bytes, str] = jwt.encode(payload, secret, \"RS256\") if", "login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": self.service.sender}, } channel = self.make_request(", "self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid issuer. 
channel = self.jwt_login({\"sub\": \"kermit\",", "{} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies) session_id = cookies[\"username_mapping_session\"] # introspect the", "= \"HS256\" base_config = { \"enabled\": True, \"secret\": jwt_secret, \"algorithm\":", "appservice bot can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = {", "self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ... with a username_mapping_session cookie cookies: Dict[str,", "FakeChannel.isSecure() returns False, so synapse will see the requested uri", "= {} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies) session_id = cookies[\"username_mapping_session\"] # introspect", "import urlencode import pymacaroons from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource", "test_login_no_token(self) -> None: params = {\"type\": \"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\",", "delete that device, it will be a proper logout self._delete_device(access_token_2,", "redirect URL path, query = location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") #", "\"type\": \"m.login.password\", \"user\": \"mickey\", \"password\": \"<PASSWORD>\", \"device_id\": device_id, } #", "client redirectUrl chan = self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" +", "it. 
\"\"\" return ( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket>", "# The JWTPubKeyTestCase is a complement to JWTTestCase where we", "= self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self)", "KEY-----\", ] ) # This key is used to sign", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "completion page, which should 302 to the client redirectUrl chan", "content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser() p.feed(channel.text_body) p.close() # ... which", "Deactivate the account. self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id) ) )", "synapse.types import create_requester from synapse.util import Clock from tests import", "assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self)", "user.displayname }}\"} } # whitelist this client URI so we", "synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets, lambda hs, http_server: WhoamiRestServlet(hs).register(http_server), ] def", "{\"id\": \"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\": \"oidc\", \"name\": \"OIDC\"}, ], )", "\"\"\"If CAS is chosen, should redirect to the CAS server\"\"\"", "these values to 10000, but as we're overriding the entire", "KEY-----\", \"<KEY>\", \"<KEY> \"-----END PUBLIC KEY-----\", ] ) # This", "self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200, channel.result) # parse the form to", "value = 
h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] = value oidc_session_cookie = cookies[\"oidc_session\"]", "\"algorithm\": \"RS256\", } return config def jwt_encode(self, payload: Dict[str, Any],", "the private key. @skip_unless(HAS_JWT, \"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets =", "proxied_http_client=mocked_http_client, ) return self.hs def prepare(self, reactor: MemoryReactor, clock: Clock,", ") self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If the", "this manually as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, \"failed_attempts\":", "self.make_request(\"GET\", cas_ticket_url) # Test that the response is HTML. self.assertEqual(channel.code,", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid audience. channel", "-> None: \"\"\"Test that non-as users cannot login with the", "401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # # test behaviour", "user's sessions channel = self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "{**base_config, \"subject_claim\": \"username\"}}) def test_login_custom_sub(self) -> None: \"\"\"Test reading user", "ID from the default subject claim.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\"})", "start of the login flow channel = self.helper.auth_via_oidc( {\"sub\": \"tester\",", "\"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA 
PRIVATE", "def test_deactivated_user(self) -> None: \"\"\"Logging in as a deactivated account", "\"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) if i ==", "file except in compliance with the License. # You may", "\"<PASSWORD>\") # create a device_id longer than 512 characters device_id", "redirect to # https://.... BASE_URL = \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) #", "def test_soft_logout(self) -> None: self.register_user(\"kermit\", \"monkey\") # we shouldn't be", "possibly specifying an IDP provider \"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\" if", "\"\"\"Tests that the SSO login flow serves a redirect for", "returned location matches the requested redirect URL path, query =", "# private key placed in foo.key (jwt_privatekey). jwt_pubkey = \"\\n\".join(", "return self.hs def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer)", "this manually as well \"account\": {\"per_second\": 10000, \"burst_count\": 10000}, }", "urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code, 302, channel.result) location_headers =", "True except ImportError: HAS_JWT = False # synapse server name:", "= int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) # Since we're ratelimiting", "self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200, channel.result) # time passes self.reactor.advance(24", "10000, but as we're overriding the entire # rc_login dict", "normal params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\":", "location_headers[0] # hitting that picker should give us some HTML", "have to parse them params = urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True,", "has fields assumed elsewhere in this class html = channel.result[\"body\"].decode(\"utf-8\")", 
"self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0 + 1.0) params =", "redirect url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies: Dict[str, str]", "@override_config), don't replace it. if config.get(\"jwt_config\") is None: config[\"jwt_config\"] =", "400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC, \"Requires SAML2 and OIDC\")", "401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to", "Clock from tests import unittest from tests.handlers.test_oidc import HAS_OIDC from", "that as users cannot login with wrong as token\"\"\" self.register_appservice_user(AS_USER,", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" ) def", "redirectUrl chan = self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)],", "# limitations under the License. 
import json import time import", "\"test-issuer\"}}) def test_login_iss(self) -> None: \"\"\"Test validating the issuer claim.\"\"\"", "-> None: channel = self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_username_picker(self) -> None: \"\"\"Test the", "\"aud\" claim', ) def test_login_aud_no_config(self) -> None: \"\"\"Test providing an", "\"\\n\".join( [ \"-----BEGIN PUBLIC KEY-----\", \"<KEY>\", \"<KEY> \"-----END PUBLIC KEY-----\",", "\"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA PRIVATE KEY-----\", ] ) # Generated", "which should redirect to our idp picker channel = self._make_sso_redirect_request(None)", "login, logout, register from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import", "\"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) # that should redirect to the username", "uri as # http://..., so using http in the public_baseurl", "language governing permissions and # limitations under the License. import", "public_baseurl stops Synapse trying to redirect to # https://.... BASE_URL", "call the fake CAS and serve the template. 
channel =", "= \"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, register.register_servlets, ]", "access_token=access_token) self.assertEqual(channel.code, 200, channel.result) # time passes self.reactor.advance(24 * 3600)", "each href returned_idps: List[str] = [] for link in p.links:", "the SAML server self.assertEqual(saml_uri_path, SAML_SERVER) # the RelayState is used", "\"nbf\": now + 3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return", "we need to set this manually as well \"account\": {\"per_second\":", "response is HTML. self.assertEqual(channel.code, 200, channel.result) content_type_header_value = \"\" for", "now be able to make requests with the access token", "HAS_JWT = True except ImportError: HAS_JWT = False # synapse", "login flow serves a redirect for the given redirect URL.\"\"\"", "channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\":", "\"-----END RSA PRIVATE KEY-----\", ] ) # Generated with `openssl", "self.register_user(\"kermit\", \"monkey\") # we shouldn't be able to make requests", "params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"},", "-> None: \"\"\"If SAML is chosen, should redirect to the", "self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", ) self.assertEqual(channel.code, 302,", "OIDC\") class 
MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers with multiple SSO providers", "as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\":", "the mock's return value) because the corresponding Synapse code awaits", "self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device( self,", "+ str(i)}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params)", "self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\") class", "channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False,", "{ \"config\": {\"localpart_template\": \"{{ user.sub }}\"} }, } ] return", "-> None: \"\"\"Test reading user ID from a custom subject", "TEST_CLIENT_REDIRECT_URL) # the expiry time should be about 15 minutes", "\"oidc\", \"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If CAS is", "} } ) def test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests that the", "def test_login_iss_no_config(self) -> None: \"\"\"Test providing an issuer claim without", "cas_uri_params[\"service\"][0] _, service_uri_query = service_uri.split(\"?\", 1) service_uri_params = urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0],", "KIND, either express or implied. 
# See the License for", "in the service param - ie, the # place that", "redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self) -> None: \"\"\"Logging in", "None: \"\"\"Test providing an audience without requiring it in the", "channel.json_body[\"error\"], \"JWT validation failed: Signature has expired\" ) def test_login_jwt_not_before(self)", "'JWT validation failed: Token is missing the \"aud\" claim', )", "from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to be returned by an async", "self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the expiry time should be about 15", "self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", ) self.assertEqual(channel.code, 302,", "\"HS256\" base_config = { \"enabled\": True, \"secret\": jwt_secret, \"algorithm\": jwt_algorithm,", "channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result) expected_flow_types = [", "Dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_username_picker(self)", "\"M_MISSING_TOKEN\") # log in as normal params = { \"type\":", "picker, which should serve a redirect # to the completion", "KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA", "set a cookie including the redirect url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\")", "be about 15 minutes away expected_expiry = self.clock.time_msec() + (15", "the username 
picker self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "[TEST_SAML_METADATA]}, # use the XMLSecurity backend to avoid relying on", "%3D is =, %26 is &, %2B is + TEST_CLIENT_REDIRECT_URL", "= \"https://legit-site.com/\" # First login (to create the user). self._test_redirect(redirect_url)", "fields assumed elsewhere in this class html = channel.result[\"body\"].decode(\"utf-8\") p", "configured in synapse as \"jwt_secret\", and tokens # signed by", "token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\":", "+= \"/\" + idp_prov endpoint += \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return", "redirect for the given redirect URL.\"\"\" cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\"", "\"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "old versions of twisted don't do form-parsing without a valid", "test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If the client tries to pick an", ") # Get Synapse to call the fake CAS and", "login ratelimiter from raising first # # This is normally", "test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\", \"monkey\") # log in as normal", "# Create different users so we're sure not to be", "\"https://other-site.com/\", ] } } ) def test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests", "self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov: Optional[str] = None) -> FakeChannel:", "(the \"License\"); # you may not use this file except", "key: str) -> str: prefix = key + \" =", "configuration.\"\"\" channel = 
self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "< 6000) self.reactor.advance(retry_after_ms / 1000.0 + 1.0) params = {", "None: \"\"\"If the client pick a known IdP, redirect to", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature verification failed\",", "\"Invalid JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}}) def test_login_iss(self) -> None:", "{\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets,", "hard logout this session channel = self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"],", "0) # And that it contains our redirect link self.assertIn(redirect_url,", "self.assertTrue(len(channel.result[\"body\"]) > 0) # And that it contains our redirect", "\"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4]))", "ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for f in channel.json_body[\"flows\"]], expected_flow_types ) flows", "should redirect to the SAML server\"\"\" channel = self.make_request( \"GET\",", "should redirect us to the auth page of the OIDC", "cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0] _, service_uri_query = service_uri.split(\"?\",", "return result.decode(\"ascii\") return result def jwt_login(self, *args: Any) -> FakeChannel:", "SSO login flow serves a redirect for the given redirect", "And that it contains our redirect link self.assertIn(redirect_url, 
channel.result[\"body\"].decode(\"UTF-8\")) @override_config(", "channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "_delete_device( self, access_token: str, user_id: str, password: str, device_id: str", "def test_username_picker(self) -> None: \"\"\"Test the happy path of a", "b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) #", "self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the expiry time should be", "= \"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" config = self.default_config() config[\"public_baseurl\"] =", "the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov: Optional[str] =", "def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey) ->", "[ {\"regex\": r\"@as_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [],", "client URI so we redirect straight to it rather than", "self.assertEqual(channel.code, 200, channel.result) access_token = channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"] #", "request to /_matrix/client/r0/login/sso/redirect ... 
possibly specifying an IDP provider \"\"\"", "# # Unless required by applicable law or agreed to", "None: \"\"\"GET /login should return password and SSO flows\"\"\" channel", "self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An unknown IdP should", "a UI-Auth fail self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"}, channel.result, )", "which gives us our # matrix access token, mxid, and", "should contain our redirect link self.assertEqual(len(p.links), 1) path, query =", "the License. import json import time import urllib.parse from typing", "used in some tests CAS_SERVER = \"https://fake.test\" # just enough", "\"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\":", "is used to sign tokens that shouldn't be accepted by", "}, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def test_login_appservice_user(self) -> None:", "\"yolo\" * 512 body = { \"type\": \"m.login.password\", \"user\": \"mickey\",", "page, which should 302 to the client redirectUrl chan =", "\"@frog:test\") def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}, \"notsecret\")", "TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def", "\"m.login.token\", \"m.login.password\", ] + [f[\"type\"] for f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual(", "with the expired token should still return a soft-logout self.reactor.advance(3600)", "# with `openssl genrsa 512` (not a secure way to", "test_get_login_flows(self) -> None: \"\"\"GET 
/login should return password and SSO", "implied. # See the License for the specific language governing", "self.login(\"kermit\", \"monkey\") # more requests with the expired token should", "check it's a UI-Auth fail self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"},", "under the License. import json import time import urllib.parse from", "= channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}})", "if config.get(\"jwt_config\") is None: config[\"jwt_config\"] = self.base_config return config def", "some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" # public_baseurl for some tests.", "characters in. %3D is =, %26 is &, %2B is", "should return password and SSO flows\"\"\" channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\")", "{\"type\": \"m.id.user\", \"user\": \"kermit\" + str(i)}, \"password\": \"<PASSWORD>\", } channel", "str) -> str: prefix = key + \" = \"", "custom subject claim.\"\"\" channel = self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "None: \"\"\"/login/sso/redirect should redirect to an identity picker\"\"\" # first", "above # private key placed in foo.key (jwt_privatekey). 
jwt_pubkey =", "-> None: self.register_user(\"mickey\", \"<PASSWORD>\") # create a device_id longer than", "endpoint of a CAS server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs", "config = super().default_config() # If jwt_config has been defined (eg", "\"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard log out", "params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": { \"account\": {\"per_second\":", "an appservice user can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params =", "[f[\"type\"] for f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for f in", "check that the username mapping session # looks ok. username_mapping_sessions", "Dict, List, Optional, Union from unittest.mock import Mock from urllib.parse", "</cas:authenticationSuccess> </cas:serviceResponse> \"\"\" % cas_user_id ).encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect", "is missing the \"iss\" claim', ) def test_login_iss_no_config(self) -> None:", "LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase):", "{ \"sso\": { \"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\", ] } }", "\"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" config = self.default_config() config[\"public_baseurl\"] = (", "# An invalid audience. 
channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"})", "\"account\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the address login", "-> Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"]", "providers config[\"oidc_providers\"] = [ { \"idp_id\": \"idp1\", \"idp_name\": \"IDP1\", \"discover\":", "self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "defined (eg via @override_config), don't replace it. if config.get(\"jwt_config\") is", "sign tokens that shouldn't be accepted by synapse. # Generated", "self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid audience. channel = self.jwt_login({\"sub\": \"kermit\",", "OIDC provider channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) +", "bit to check that the username mapping session # looks", "b\"/_matrix/client/r0/account/whoami\" # a (valid) url with some annoying characters in.", "params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field for", "this class html = channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser() p.feed(html) p.close()", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\": {**base_config,", "{ \"type\": \"m.login.password\", \"user\": \"mickey\", \"password\": \"<PASSWORD>\", \"device_id\": device_id, }", "assumed elsewhere in this class html = channel.result[\"body\"].decode(\"utf-8\") p =", "don't do form-parsing without a 
valid # content-length header. (\"Content-Length\",", "users cannot login with the as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params", "tell pysaml2 where to redirect to SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA", "It is generated # with `openssl genrsa 512` (not a", "\"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\":", "non-as users cannot login with the as token\"\"\" self.register_appservice_user(AS_USER, self.service.token)", "self.assertIn(\"username_mapping_session\", cookies) session_id = cookies[\"username_mapping_session\"] # introspect the sso handler", "location_headers # send a request to the completion page, which", "\"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\",", "self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, ) # test that the", "], ) self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers", "synapse. 
Valid # tokens are signed by this and validated", "Union from unittest.mock import Mock from urllib.parse import urlencode import", "Any) -> bytes: \"\"\"Return an example response payload from a", "the login flow channel = self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\": \"Jonny\"},", "a username to the username picker, which should serve a", "cookies: Dict[str, str] = {} for h in cookie_headers: key,", "channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self) -> None: self.register_user(\"kermit\", \"monkey\") #", "\"devices/\" + device_id, access_token=access_token ) self.assertEqual(channel.code, 401, channel.result) # check", "keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # finally,", "to be bothered by the per-user # ratelimiter. for i", "str(i), \"monkey\") for i in range(0, 6): params = {", "Unless required by applicable law or agreed to in writing,", "cookie_headers: key, value = h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] = value oidc_session_cookie", "None: \"\"\"Test that an appservice user can use /login\"\"\" self.register_appservice_user(AS_USER,", "}}\"} } # whitelist this client URI so we redirect", "need to set this manually as well \"address\": {\"per_second\": 10000,", "\"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\": \"m.id.user\", \"user\": user_id}, \"user\":", "= self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "... 
with a username_mapping_session cookie cookies: Dict[str, str] = {}", "cookies) session_id = cookies[\"username_mapping_session\"] # introspect the sso handler a", "in macaroon\" % (key,)) class CASTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets,", "SAML_SERVER) # the RelayState is used to carry the client", "str] = jwt.encode(payload, secret, \"RS256\") if isinstance(result, bytes): return result.decode(\"ascii\")", "\"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self,", "License. import json import time import urllib.parse from typing import", "the requested uri as # http://..., so using http in", "the specific language governing permissions and # limitations under the", "str: prefix = key + \" = \" for caveat", "in as normal access_token = self.login(\"kermit\", \"monkey\") # we should", "us to the login page of the SAML server self.assertEqual(saml_uri_path,", "SSO login flow serves a redirect to a whitelisted url\"\"\"", "an audience. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\", \"monkey\") for i in range(0, 6):", "should redirect to our idp picker channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code,", "= [ login.register_servlets, register.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock:", "not to be bothered by the per-user # ratelimiter. 
for", "self.assertEqual(path, \"pick_idp\") params = urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\",", "manually as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, \"failed_attempts\": {\"per_second\":", "or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER,", "A valid issuer. channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"],", "the corresponding Synapse code awaits on it. \"\"\" return (", "<cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse> \"\"\" % cas_user_id ).encode(\"utf-8\") mocked_http_client =", "expired token should still return a soft-logout self.reactor.advance(3600) channel =", "name: used to populate public_baseurl in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME =", "idp_prov: Optional[str] = None) -> FakeChannel: \"\"\"Send a request to", "`openssl genrsa 512` (not a secure way to generate real", "mapping session # looks ok. username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id,", "username_mapping_session cookie cookies: Dict[str, str] = {} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies)", "prefix = key + \" = \" for caveat in", "reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration", "an issuer claim without requiring it in the configuration.\"\"\" channel", "valid # content-length header. 
(\"Content-Length\", str(len(content))), ], ) self.assertEqual(chan.code, 302,", "this session channel = self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "an async function (as opposed to set as the mock's", ") self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers #", "= self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token, content={\"auth\": auth}, )", "to the OIDC auth endpoint\"\"\" # pick the default OIDC", "API, which gives us our # matrix access token, mxid,", "SSO providers enabled\"\"\" servlets = [ login.register_servlets, ] def default_config(self)", "channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400, channel.result) def", "{\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self) -> None: \"\"\"Logging in as a", "serving a confirmation page config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]} return config", "\"\"\"Test that non-as users cannot login with the as token\"\"\"", "C.I.C. 
# # Licensed under the Apache License, Version 2.0", "access_token = self.login(\"kermit\", \"monkey\") # we should now be able", "def test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\", \"monkey\") channel = self.jwt_login({\"sub\": \"kermit\"})", "test_login_appservice_wrong_user(self) -> None: \"\"\"Test that non-as users cannot login with", "\"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\": \"oidc\", \"name\": \"OIDC\"}, ], ) def", "self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def test_login_appservice_user(self) -> None: \"\"\"Test that an", ") self.another_service = ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS:", "_test_redirect(self, redirect_url: str) -> None: \"\"\"Tests that the SSO login", "def _test_redirect(self, redirect_url: str) -> None: \"\"\"Tests that the SSO", "are served an error template. self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account deactivated\",", "CAS_SERVER + \"/login\") # check that the redirectUrl is correctly", "custom_headers=None, ) # test that the login fails with the", "than 512 characters device_id = \"yolo\" * 512 body =", "in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\", \"name\": \"CAS\"}, {\"id\":", "handler a bit to check that the username mapping session", "CAS ticket. 
cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) #", "self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\") class", "to be returned by an async function (as opposed to", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\":", "# introspect the sso handler a bit to check that", "body = { \"type\": \"m.login.password\", \"user\": \"mickey\", \"password\": \"<PASSWORD>\", \"device_id\":", "get_raw self.hs = self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, ) return self.hs def", "urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts) # Get Synapse to call the", ") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri", "= int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) # Since we're ratelimiting", "should be about 15 minutes away expected_expiry = self.clock.time_msec() +", "Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" % { \"SAML_SERVER\": SAML_SERVER, }", "\"server_url\": CAS_SERVER, } cas_user_id = \"username\" self.user_id = \"@%s:test\" %", "\"/login\") # check that the redirectUrl is correctly encoded in", "= channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri = location_headers[0] saml_uri_path, saml_uri_query =", "self.register_user(\"mickey\", \"<PASSWORD>\") # create a device_id longer than 512 characters", "requests without an access token channel = self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"],", 
"[] for link in p.links: path, query = link.split(\"?\", 1)", "{\"per_second\": 10000, \"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_account(self) ->", "Invalid issuer\" ) # Not providing an issuer. channel =", "None: \"\"\"Tests that the SSO login flow serves a redirect", "from urllib.parse import urlencode import pymacaroons from twisted.test.proto_helpers import MemoryReactor", "reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.deactivate_account_handler =", "def test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests that the SSO login flow", "= hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) -> None: \"\"\"Tests that the SSO", "# content-length header. (\"Content-Length\", str(len(content))), ], ) self.assertEqual(chan.code, 302, chan.result)", "self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code,", "be able to make requests without an access token channel", "HomeServer: self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid = []", "contains our redirect link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\": {", "channel.result) def test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If the client tries to", "None: # Create different users so we're sure not to", "{} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\": {\"display_name_template\": \"{{ user.displayname }}\"}", "Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"] =", "introspect the sso handler a bit to check that the", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 
\"JWT validation failed: Signature has expired\"", "some HTML channel = self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200, channel.result) #", "= self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "# that should redirect to the username picker self.assertEqual(channel.code, 302,", "= super().default_config() # If jwt_config has been defined (eg via", "behaviour after deleting the expired device # # we now", "\"JWT validation failed: Signature verification failed\", ) AS_USER = \"as_user_alice\"", "the sso handler a bit to check that the username", "# send a request to the completion page, which should", "to redirect to SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor", "strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # fish the", "device_id) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "channel.result) def test_login_appservice_user_bot(self) -> None: \"\"\"Test that the appservice bot", "been defined (eg via @override_config), don't replace it. 
if config.get(\"jwt_config\")", "than # serving a confirmation page config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]}", "\"device_id\": device_id, } # make a login request with the", "user ID from the default subject claim.\"\"\" channel = self.jwt_login({\"sub\":", "link in p.links: path, query = link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\")", "= \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\",", "channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "channel = self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "def test_login_no_token(self) -> None: params = {\"type\": \"org.matrix.login.jwt\"} channel =", "logout self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token)", "/login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\":", "an access token channel = self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result)", "channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # # test behaviour after deleting", "to the `/proxyValidate` endpoint of a CAS server, copied from", "= channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri = location_headers[0] 
cas_uri_path, cas_uri_query =", "from tests.server import FakeChannel from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest", "\" = \" for caveat in macaroon.caveats: if caveat.caveat_id.startswith(prefix): return", "str. result: Union[str, bytes] = jwt.encode(payload, secret, self.jwt_algorithm) if isinstance(result,", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self) -> None: params", "str) -> None: \"\"\"Tests that the SSO login flow serves", "redirect to the OIDC auth endpoint\"\"\" # pick the default", "channel = self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self)", "able to make requests with the access token channel =", "page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov:", "# make a login request with the bad device_id channel", "is + TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query params", "... which should contain our redirect link self.assertEqual(len(p.links), 1) path,", "do the start of the login flow channel = self.helper.auth_via_oidc(", "URL. 
\"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\" url_parts =", "that non-as users cannot login with the as token\"\"\" self.register_appservice_user(AS_USER,", "unknown IdP, return a 404\"\"\" channel = self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404,", "a custom subject claim.\"\"\" channel = self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\",", "self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the expiry time", "test_login_appservice_wrong_as(self) -> None: \"\"\"Test that as users cannot login with", "device\"\"\" channel = self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token )", "} } ) def test_POST_ratelimiting_per_address(self) -> None: # Create different", "b\"403\", channel.result) # Since we're ratelimiting at 1 request/min, retry_after_ms", "\"config\": {\"localpart_template\": \"{{ user.sub }}\"} }, } ] return config", ") self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self) -> None:", "flow serves a redirect to a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\":", "invalid issuer. channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\",", "with `openssl rsa -in foo.key -pubout`, with the the above", "raising first # # This is normally covered by the", "\"synapse\" # public_baseurl for some tests. 
It uses an http://", "channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" ) def test_login_default_sub(self) ->", "PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END", "[ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\",", "# ensure that the returned location matches the requested redirect", "response payload from a call to the `/proxyValidate` endpoint of", "channel.json_body[\"error\"], \"JWT validation failed: Invalid issuer\" ) # Not providing", "`openssl rsa -in foo.key -pubout`, with the the above #", "issuer. 
channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "auth page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ...", "\"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA PRIVATE KEY-----\", ] ) # Generated with", "Valid # tokens are signed by this and validated using", "expiry time should be about 15 minutes away expected_expiry =", "should be a link for each href returned_idps: List[str] =", "config def create_resource_dict(self) -> Dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs))", "# we should now be able to make requests with", "is chosen, should redirect to the OIDC auth endpoint\"\"\" #", "self.assertEqual(channel.code, 200, channel.result) content_type_headers = channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p", "endpoint += \"/\" + idp_prov endpoint += \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)", "page of the SAML server self.assertEqual(saml_uri_path, SAML_SERVER) # the RelayState", "it contains our redirect link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\":", "\"secret\" jwt_algorithm = \"HS256\" base_config = { \"enabled\": True, \"secret\":", "test_login_iss_no_config(self) -> None: \"\"\"Test providing an issuer claim without requiring", "+ (15 * 60 * 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) #", "\"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\": \"m.id.user\", \"user\": user_id},", "False, create_requester(self.user_id) ) ) # Request the CAS ticket. cas_ticket_url", "user is deactivated they are served an error template. 
self.assertEqual(channel.code,", "= self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def", "result: Union[bytes, str] = jwt.encode(payload, secret, \"RS256\") if isinstance(result, bytes):", "self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log in", "the body isn't empty. self.assertTrue(len(channel.result[\"body\"]) > 0) # And that", "A valid audience. channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"],", "\"user\": \"kermit\" + str(i)}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\",", "form to check it has fields assumed elsewhere in this", "It uses an http:// scheme because # FakeChannel.isSecure() returns False,", "{\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the address login ratelimiter", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_wrong_as(self) -> None: \"\"\"Test that as", "LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_user_bot(self) ->", "serve a confirmation page self.assertEqual(channel.code, 200, channel.result) content_type_headers = channel.headers.getRawHeaders(\"Content-Type\")", "None: self.register_user(\"kermit\", \"<PASSWORD>\") for i in range(0, 6): params =", "self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\",", "as users cannot login with wrong as token\"\"\" self.register_appservice_user(AS_USER, self.service.token)", "You may obtain a copy of the License at #", "\"user\": self.service.sender}, } channel = 
self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token", "private key placed in foo.key (jwt_privatekey). jwt_pubkey = \"\\n\".join( [", "{\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"}, } channel = self.make_request( b\"POST\", LOGIN_URL,", "tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import HomeserverTestCase, override_config, skip_unless try:", "the client redirectUrl chan = self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\"", "self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If SAML is chosen,", "self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\":", "= { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\" +", "from synapse.appservice import ApplicationService from synapse.rest.client import devices, login, logout,", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid issuer. channel", "valid audience. channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "None: channel = self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\")", "of synapse. Valid # tokens are signed by this and", "using the pubkey. 
It is generated # with `openssl genrsa", "self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\":", "(possibly experimental) login flows we expect to appear in the", "rc_login dict here, we need to set this manually as", "self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [],", "pick an unknown IdP, return a 404\"\"\" channel = self._make_sso_redirect_request(\"xxx\")", "range(0, 6): self.register_user(\"kermit\" + str(i), \"monkey\") for i in range(0,", "RSS256, with a public key configured in synapse as \"jwt_secret\",", "that the login fails with the correct error code self.assertEqual(channel.code,", "login method \"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE,", "unittest.mock import Mock from urllib.parse import urlencode import pymacaroons from", "= saml_uri.split(\"?\", 1) # it should redirect us to the", "= {} for h in cookie_headers: key, value = h.split(\";\")[0].split(\"=\",", "{ \"rc_login\": { \"account\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent", "self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\":", "location matches the requested redirect URL path, query = location_headers[0].split(\"?\",", "to SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor", "a 404\"\"\" channel = 
self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\")", "\"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "= [] self.hs.config.captcha.enable_registration_captcha = False return self.hs @override_config( { \"rc_login\":", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_wrong_user(self) -> None: \"\"\"Test that non-as", "straight to it rather than # serving a confirmation page", "= self.setup_test_homeserver() self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms =", "# synapse server name: used to populate public_baseurl in some", "_get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str: prefix = key +", "of jwt.encode from bytes to str. result: Union[bytes, str] =", "namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [],", "from tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml import has_saml2 from tests.rest.client.utils", "URL.\"\"\" cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) # Get", "], ) def test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect should redirect to", "p.close() # there should be a link for each href", "self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # ... 
but", "servlets = [ login.register_servlets, register.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor,", "= cookies[\"username_mapping_session\"] # introspect the sso handler a bit to", "is used as the jwt_secret setting of synapse. Valid #", "as a different device access_token_2 = self.login(\"kermit\", \"monkey\") # more", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.login(\"kermit\", \"monkey\") # we should now be able to make", "# \"identifier\": {\"type\": \"m.id.user\", \"user\": user_id}, \"user\": user_id, \"password\": password,", "from synapse.util import Clock from tests import unittest from tests.handlers.test_oidc", "http in the public_baseurl stops Synapse trying to redirect to", "this client URI so we redirect straight to it rather", "device_id = \"yolo\" * 512 body = { \"type\": \"m.login.password\",", "\"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid audience.", "MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers with multiple SSO providers enabled\"\"\" servlets", "parse them params = urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True, errors=\"strict\" )", "test_deactivated_user(self) -> None: \"\"\"Logging in as a deactivated account should", "= \"\\n\".join( [ \"-----BEGIN PUBLIC KEY-----\", \"<KEY>\", \"<KEY> \"-----END PUBLIC", "The Matrix.org Foundation C.I.C. 
# # Licensed under the Apache", "-> HomeServer: self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" config =", "self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers uri", "# the expiry time should be about 15 minutes away", "str, password: str, device_id: str ) -> None: \"\"\"Perform the", "claim without requiring it in the configuration.\"\"\" channel = self.jwt_login({\"sub\":", "CAS is chosen, should redirect to the CAS server\"\"\" channel", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid issuer. channel =", "def test_login_iss(self) -> None: \"\"\"Test validating the issuer claim.\"\"\" #", "override_config, skip_unless try: import jwt HAS_JWT = True except ImportError:", "= TestHtmlParser() p.feed(channel.text_body) p.close() # ... which should contain our", "= self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"],", "r\"@as_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, )", "FakeChannel from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import HomeserverTestCase, override_config,", "= jwt_secret) -> str: # PyJWT 2.0.0 changed the return", "None: \"\"\"If SAML is chosen, should redirect to the SAML", "\"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT", "chosen, should redirect to the CAS server\"\"\" channel = self.make_request(", "Signature has expired\" ) def 
test_login_jwt_not_before(self) -> None: now =", "to populate public_baseurl in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" #", "servlets = [login.register_servlets] def default_config(self) -> Dict[str, Any]: config =", "picker should give us some HTML channel = self.make_request(\"GET\", uri)", "via @override_config), don't replace it. if config.get(\"jwt_config\") is None: config[\"jwt_config\"]", "jwt_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\",", "= self.login(\"kermit\", \"monkey\") # we should now be able to", "that the SSO login flow serves a redirect for the", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "{\"per_second\": 10000, \"burst_count\": 10000}, \"failed_attempts\": {\"per_second\": 0.17, \"burst_count\": 5}, }", "login with wrong as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = {", "\"JWT validation failed: Invalid audience\" ) # Not providing an", "of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov: Optional[str]", "the username mapping session # looks ok. username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions", "License. # You may obtain a copy of the License", "login token to the login API, which gives us our", "+ str(i), \"monkey\") for i in range(0, 6): params =", "\"_synapse/client/login/sso/redirect/confirm\" config = self.default_config() config[\"public_baseurl\"] = ( config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\"", "the client pick a known IdP, redirect to it\"\"\" channel", "from unittest.mock import Mock from urllib.parse import urlencode import pymacaroons", "channel.result) # time passes self.reactor.advance(24 * 3600) # ... 
and", "= channel.json_body[\"device_id\"] # we should now be able to make", "1) service_uri_params = urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) -> None:", "redirect to cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0] _, service_uri_query", "permissions and # limitations under the License. import json import", "redirect to a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self)", "{} for h in cookie_headers: key, value = h.split(\";\")[0].split(\"=\", maxsplit=1)", "\"m.login.sso\", \"m.login.token\", \"m.login.password\", ] + [f[\"type\"] for f in ADDITIONAL_LOGIN_FLOWS]", "] + [f[\"type\"] for f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for", "\"RS256\") if isinstance(result, bytes): return result.decode(\"ascii\") return result def jwt_login(self,", "channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\")", "JWTPubKeyTestCase is a complement to JWTTestCase where we instead use", "% cas_user_id ).encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw self.hs", "self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token, content={\"auth\": auth}, ) self.assertEqual(channel.code,", "that the response is HTML. self.assertEqual(channel.code, 200, channel.result) content_type_header_value =", "The token is not yet valid (nbf)\", ) def test_login_no_sub(self)", "the login API, which gives us our # matrix access", "ok. 
username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id, username_mapping_sessions, \"session id not", "CAS server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to be returned", "LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field", "appservice user can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = {", "tolerance=1000) # Now, submit a username to the username picker,", "protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" % { \"SAML_SERVER\":", "location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri = location_headers[0] saml_uri_path, saml_uri_query", "# there should be a link for each href returned_idps:", "Prevent the address login ratelimiter from raising first # #", "self.assertEqual(channel.code, 200, channel.result) # parse the form to check it", "\"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\"", "password, \"session\": channel.json_body[\"session\"], } channel = self.make_request( b\"DELETE\", \"devices/\" +", "cas_ticket_url = urllib.parse.urlunparse(url_parts) # Get Synapse to call the fake", "we delete that device, it will be a proper logout", "{ \"enabled\": True, \"secret\": jwt_secret, \"algorithm\": jwt_algorithm, } def 
default_config(self)", "def default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"] =", "default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"jwt_config\"] = {", "\"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA PRIVATE KEY-----\", ] )", "def test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect should redirect to an identity", "[ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\",", "from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server import FakeChannel from", "\"saml\", \"name\": \"SAML\"}, {\"id\": \"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\": \"oidc\", \"name\":", "redirect link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\": { \"client_whitelist\": [", "token channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200, channel.result) #", "\"saml\"]) def test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If CAS is chosen, should", "Optional, Union from unittest.mock import Mock from urllib.parse import urlencode", "= super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"] = { \"enabled\": True,", "5}, } } ) def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\", \"<PASSWORD>\")", "self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed: Token is 
missing the \"aud\"", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid issuer\" ) # Not", "requested uri as # http://..., so using http in the", "providing an audience without requiring it in the configuration.\"\"\" channel", "after the normal # ones ADDITIONAL_LOGIN_FLOWS = [ {\"type\": \"m.login.application_service\"},", "\"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT", "[ \"https://legit-site.com/\", \"https://other-site.com/\", ] } } ) def test_cas_redirect_whitelisted(self) ->", "confirmation page self.assertEqual(channel.code, 200, channel.result) content_type_headers = channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers", "str ) -> None: \"\"\"Perform the UI-Auth to delete a", "-> Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"]", "public_baseurl in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" # public_baseurl for", "# Copyright 2019-2021 The Matrix.org Foundation C.I.C. # # Licensed", "backend to avoid relying on xmlsec1 \"crypto_backend\": \"XMLSecurity\", }, }", "Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_get_login_flows(self) ->", "CAS server used in some tests CAS_SERVER = \"https://fake.test\" #", "] def default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"]", "# parse the form to check it has fields assumed", "yet valid (nbf)\", ) def test_login_no_sub(self) -> None: channel =", "( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) # Get Synapse to call", "this and validated using the pubkey. 
It is generated #", "# we now log in as a different device access_token_2", "location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\":", "OIDC provider config[\"oidc_config\"] = TEST_OIDC_CONFIG # additional OIDC providers config[\"oidc_providers\"]", "# check it's a UI-Auth fail self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\",", "which should contain our redirect link self.assertEqual(len(p.links), 1) path, query", "content_type_header_value = \"\" for header in channel.result.get(\"headers\", []): if header[0]", "\"https://example.com\"}) def test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url: str)", "a known IdP, redirect to it\"\"\" channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code,", "range(0, 6): params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\",", "genrsa 512` (not a secure way to generate real keys,", "OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov: Optional[str] = None)", "synapse. # Generated just like jwt_privatekey. 
bad_privatekey = \"\\n\".join( [", "list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4] =", "-> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url: str) -> None: \"\"\"Tests", "jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret", "\"&idp=oidc\", ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers", "oidc_session_cookie = cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL,", "the public_baseurl stops Synapse trying to redirect to # https://....", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self) -> None: channel =", "\"idp_id\": \"idp1\", \"idp_name\": \"IDP1\", \"discover\": False, \"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\",", "but if we delete that device, it will be a", "@override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}}) def test_login_iss(self) -> None: \"\"\"Test validating", "self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC, \"Requires SAML2 and OIDC\") class", "} channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"],", "self, access_token: str, user_id: str, password: str, device_id: str )", "5}, # Prevent the address login ratelimiter from raising first", "cas_uri.split(\"?\", 1) # it should redirect us to the login", "AS_USER}, } 
channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.another_service.token )", "because # FakeChannel.isSecure() returns False, so synapse will see the", "providing an audience. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "it has fields assumed elsewhere in this class html =", "ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\",", "all of the user's sessions channel = self.make_request(b\"POST\", \"/logout/all\", access_token=access_token)", "% (key,)) class CASTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] def", "synapse.rest.admin from synapse.appservice import ApplicationService from synapse.rest.client import devices, login,", "HomeServer: self.hs = self.setup_test_homeserver() self.service = ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\",", "users so we're sure not to be bothered by the", "], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.another_service = ApplicationService(", "LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_wrong_user(self) ->", "\"m.id.user\", \"user\": AS_USER}, } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"],", "content=content, content_is_form=True, custom_headers=[ (\"Cookie\", \"username_mapping_session=\" + session_id), # old versions", "= self.make_request(b\"POST\", LOGIN_URL, params) if i == 5: self.assertEqual(channel.result[\"code\"], b\"429\",", "# old versions of twisted don't do form-parsing without a", "urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", ) self.assertEqual(channel.code, 302, 
channel.result) location_headers = channel.headers.getRawHeaders(\"Location\")", "-> None: \"\"\"Test that the appservice bot can use /login\"\"\"", "self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self) ->", "them params = urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2],", "self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that the body isn't empty. self.assertTrue(len(channel.result[\"body\"]) >", "self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation", "access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_wrong_user(self) -> None: \"\"\"Test", "+ 1.0) params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\",", "urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"]) def", "device_id: str ) -> None: \"\"\"Perform the UI-Auth to delete", "@override_config( { \"rc_login\": { \"address\": {\"per_second\": 0.17, \"burst_count\": 5}, #", "{ \"account\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the address", "the as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE,", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" ) #", "not found in map\", ) session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\")", "fish the 
login token out of the returned redirect uri", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "relying on xmlsec1 \"crypto_backend\": \"XMLSecurity\", }, } # default OIDC", "# An invalid issuer. channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"})", "config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = {", "= params[2][1] # finally, submit the matrix login token to", "self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id) ) ) # Request the CAS", "{\"type\": \"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.service = ApplicationService(", "chan = self.make_request( \"POST\", path=picker_url, content=content, content_is_form=True, custom_headers=[ (\"Cookie\", \"username_mapping_session=\"", "} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\":", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw self.hs = self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, )", "\"oidc\", \"name\": \"OIDC\"}, ], ) def test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect", "assert location_headers # ensure that the returned location matches the", "b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\" # a (valid) url with some", "LOGIN_URL, params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", 
channel.result) def test_login_appservice_no_token(self) ->", "= service_uri.split(\"?\", 1) service_uri_params = urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self)", "= [ login.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock)", "None: self.register_user(\"kermit\", \"monkey\") for i in range(0, 6): params =", "\"idp_name\": \"IDP1\", \"discover\": False, \"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\",", "if header[0] == b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test", "session channel = self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\":", "user.sub }}\"} }, } ] return config def create_resource_dict(self) ->", "# Not providing an issuer. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"],", "here, we need to set this manually as well \"address\":", "\"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\",", ") ) # Request the CAS ticket. 
cas_ticket_url = (", "0.17, \"burst_count\": 5}, # Prevent the address login ratelimiter from", "def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"],", ") # test that the login fails with the correct", "required by applicable law or agreed to in writing, software", "config[\"oidc_config\"] = TEST_OIDC_CONFIG # additional OIDC providers config[\"oidc_providers\"] = [", "ie, the # place that CAS will redirect to cas_uri_params", "xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" %", "\"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self) -> None:", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "http_server: WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) ->", "# tokens are signed by this and validated using the", "redirect to the CAS server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\"", "issuer\" ) # Not providing an issuer. 
channel = self.jwt_login({\"sub\":", "= self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) ->", "chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\": login_token}, )", "test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests that the SSO login flow serves", "a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self) -> None:", "Any]: config = super().default_config() config[\"jwt_config\"] = { \"enabled\": True, \"secret\":", "uses an http:// scheme because # FakeChannel.isSecure() returns False, so", "submit the matrix login token to the login API, which", "location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ... with a username_mapping_session cookie cookies:", "</md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" % { \"SAML_SERVER\": SAML_SERVER, } LOGIN_URL =", "\"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self) -> None:", "ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES:", "} channel = self.make_request(b\"POST\", LOGIN_URL, params) if i == 5:", "the template. 
channel = self.make_request(\"GET\", cas_ticket_url) # Test that the", "\"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\",", "= super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_username_picker(self) -> None: \"\"\"Test", "self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try", "self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self) ->", "def test_login_via_oidc(self) -> None: \"\"\"If OIDC is chosen, should redirect", "agreed to in writing, software # distributed under the License", "None: \"\"\"If OIDC is chosen, should redirect to the OIDC", "with the access token channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code,", "self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms = [] self.hs.config.captcha.enable_registration_captcha", "redirect url saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL)", "distributed under the License is distributed on an \"AS IS\"", "the redirect url, which should redirect to our idp picker", "\"\"\"Test validating the issuer claim.\"\"\" # A valid issuer. 
channel", "[ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets, lambda hs, http_server: WhoamiRestServlet(hs).register(http_server), ]", "redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts) #", "pymacaroons from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource import", "bad device_id channel = self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, )", "channel = self.make_request(\"GET\", cas_ticket_url) # Because the user is deactivated", "the auth page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) #", "location_headers picker_url = location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ... with a", "<md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\"", "= { \"enabled\": True, \"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"]", "to an identity picker\"\"\" # first hit the redirect url,", "TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) -> None: \"\"\"If OIDC is chosen, should", "well \"account\": {\"per_second\": 10000, \"burst_count\": 10000}, } } ) def", "auth endpoint\"\"\" # pick the default OIDC provider channel =", "class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] # This key's", "\"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", 
\"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE KEY-----\", ]", "d.update(build_synapse_client_resource_tree(self.hs)) return d def test_username_picker(self) -> None: \"\"\"Test the happy", "typing import Any, Dict, List, Optional, Union from unittest.mock import", "= self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\")", "302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # ensure that", "synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import", "appear in the list after the normal # ones ADDITIONAL_LOGIN_FLOWS", "} ) def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\", \"<PASSWORD>\") for i", "picker\"\"\" # first hit the redirect url, which should redirect", "\"user\": user_id}, \"user\": user_id, \"password\": password, \"session\": channel.json_body[\"session\"], } channel", "= cas_uri_params[\"service\"][0] _, service_uri_query = service_uri.split(\"?\", 1) service_uri_params = urllib.parse.parse_qs(service_uri_query)", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self) -> None: params = {\"type\":", "\"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE KEY-----\", ] ) def", "channel.result) content_type_headers = channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers 
self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser()", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature verification failed\", ) def", "key is used to sign tokens that shouldn't be accepted", "\"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE KEY-----\",", "\"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": self.service.sender}, } channel =", "channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires", "token to the login API, which gives us our #", "Dict[str, Any], secret: str = jwt_privatekey) -> str: # PyJWT", "/login should return password and SSO flows\"\"\" channel = self.make_request(\"GET\",", ") def test_POST_ratelimiting_per_address(self) -> None: # Create different users so", "\"\"\" % cas_user_id ).encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw", "self.assertEqual(channel.code, 200, channel.result) content_type_header_value = \"\" for header in channel.result.get(\"headers\",", "a redirect to a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def", "is generated # with `openssl genrsa 512` (not a secure", "LOGIN_URL, params) return channel def test_login_jwt_valid(self) -> None: channel =", "now + 3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], 
\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "@override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}}) def test_login_custom_sub(self) -> None: \"\"\"Test reading", "returned_idps: List[str] = [] for link in p.links: path, query", "\"discover\": False, \"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"],", "self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out(", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) ->", "= self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False, )", "a different device access_token_2 = self.login(\"kermit\", \"monkey\") # more requests", "location_headers cas_uri = location_headers[0] cas_uri_path, cas_uri_query = cas_uri.split(\"?\", 1) #", "= self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config,", "found in map\", ) session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name,", "{\"per_second\": 0.17, \"burst_count\": 5}, } } ) def test_POST_ratelimiting_per_account_failed_attempts(self) ->", "service_uri.split(\"?\", 1) service_uri_params = urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) 
->", "now = int(time.time()) channel = self.jwt_login({\"sub\": \"frog\", \"nbf\": now +", "= urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"])", "{\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [", "+ idp_prov endpoint += \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\",", "audience\" ) def test_login_default_sub(self) -> None: \"\"\"Test reading user ID", "None: self.register_user(\"kermit\", \"monkey\") # log in as normal access_token =", "a request to /_matrix/client/r0/login/sso/redirect ... possibly specifying an IDP provider", "if i == 5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"])", "*args: Any) -> FakeChannel: params = {\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)}", "Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"] = {", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid issuer\" )", "Any) -> FakeChannel: params = {\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel", "requiring it in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"iss\":", "False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service)", "auth = { \"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\":", 
"https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\": \"m.id.user\", \"user\": user_id}, \"user\": user_id, \"password\":", "def test_login_appservice_wrong_user(self) -> None: \"\"\"Test that non-as users cannot login", "== b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that the", "failed: Signature has expired\" ) def test_login_jwt_not_before(self) -> None: now", ") # that should redirect to the username picker self.assertEqual(channel.code,", "return type of jwt.encode from bytes to str. result: Union[bytes,", "channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser() p.feed(html) p.close() # there should be", "access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, )", "# place that CAS will redirect to cas_uri_params = urllib.parse.parse_qs(cas_uri_query)", "channel.result) access_token = channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"] # we should", "ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.another_service = ApplicationService( id=\"another__identifier\",", "UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the username picker flow of SSO login\"\"\"", "for the given redirect URL.\"\"\" cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" %", "# Prevent the account login ratelimiter from raising first #", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature verification", "Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"] =", "service_uri_params = 
urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If", "OR CONDITIONS OF ANY KIND, either express or implied. #", "\"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the username picker flow", "} # default OIDC provider config[\"oidc_config\"] = TEST_OIDC_CONFIG # additional", "self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" config = self.default_config() config[\"public_baseurl\"]", "\"\"\"Return an example response payload from a call to the", "the License is distributed on an \"AS IS\" BASIS, #", "manually as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, } }", "template. channel = self.make_request(\"GET\", cas_ticket_url) # Test that the response", "% cas_user_id async def get_raw(uri: str, args: Any) -> bytes:", "reading user ID from the default subject claim.\"\"\" channel =", "carry the client redirect url saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param =", "h in cookie_headers: key, value = h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] =", "well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, \"failed_attempts\": {\"per_second\": 0.17, \"burst_count\":", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature has expired\" ) def", "OIDC providers config[\"oidc_providers\"] = [ { \"idp_id\": \"idp1\", \"idp_name\": \"IDP1\",", "server self.assertEqual(saml_uri_path, SAML_SERVER) # the RelayState is used to carry", "FakeChannel: params = {\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel = self.make_request(b\"POST\",", "\"kermit\"}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"],", "= ( config.get(\"public_baseurl\") 
or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] = { \"enabled\":", "idp picker channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result) location_headers =", "that the SSO login flow serves a redirect to a", "code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC, \"Requires SAML2", "with the the above # private key placed in foo.key", "LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": { \"account\":", "requests with the access token channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token)", "\"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) # that should redirect to", "= [(\"<ab c>\", \"\"), ('q\" =+\"', '\"fö&=o\"')] # (possibly experimental)", "False, \"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\":", "= params[2][1] chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\":", "function (as opposed to set as the mock's return value)", "serves a redirect for the given redirect URL.\"\"\" cas_ticket_url =", "1000.0 + 1.0) params = { \"type\": \"m.login.password\", \"identifier\": {\"type\":", "\"\"\"Test providing an audience without requiring it in the configuration.\"\"\"", "expected_flow_types ) flows = {flow[\"type\"]: flow for flow in channel.json_body[\"flows\"]}", "[\"test-audience\"]}}) def test_login_aud(self) -> None: \"\"\"Test validating the audience claim.\"\"\"", "# Now, submit a username to the username picker, which", "a bit to check that the username mapping session #", "reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.service", "avoid relying on xmlsec1 \"crypto_backend\": \"XMLSecurity\", 
}, } # default", "failed\", ) def test_login_jwt_expired(self) -> None: channel = self.jwt_login({\"sub\": \"frog\",", "law or agreed to in writing, software # distributed under", "payload from a call to the `/proxyValidate` endpoint of a", "login flow serves a redirect to a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\")", "10000, \"burst_count\": 10000}, \"failed_attempts\": {\"per_second\": 0.17, \"burst_count\": 5}, } }", "that picker should give us some HTML channel = self.make_request(\"GET\",", "p = TestHtmlParser() p.feed(html) p.close() # there should be a", "some annoying characters in. %3D is =, %26 is &,", "len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self) -> None: \"\"\"Logging", "location_headers # ensure that the returned location matches the requested", "the default subject claim.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "the jwt_secret setting of synapse. 
Valid # tokens are signed", "the user is deactivated they are served an error template.", "{\"regex\": r\"@as2_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], },", "self.register_user(\"kermit\" + str(i), \"monkey\") for i in range(0, 6): params", "= self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\":", "hs, http_server: WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock)", "self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for", "@override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self) -> None: self.register_user(\"kermit\", \"monkey\") # we", "import json import time import urllib.parse from typing import Any,", "] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs", "url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def", "Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_username_picker(self) ->", "access_token_2 = self.login(\"kermit\", \"monkey\") # more requests with the expired", "content={\"auth\": auth}, ) self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self)", "\"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, 
logout.register_servlets,", "for some tests. It uses an http:// scheme because #", "to the completion page, which should 302 to the client", "user_id, \"password\": password, \"session\": channel.json_body[\"session\"], } channel = self.make_request( b\"DELETE\",", "10000}, } } ) def test_POST_ratelimiting_per_address(self) -> None: # Create", "may obtain a copy of the License at # #", "and validated using the pubkey. It is generated # with", "int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) # Since we're ratelimiting at", "params = {\"type\": \"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"],", "-in foo.key -pubout`, with the the above # private key", "[ login.register_servlets, ] # This key's pubkey is used as", "the username picker flow of SSO login\"\"\" servlets = [login.register_servlets]", "from tests import unittest from tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml", "5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\",", ") self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # finally, submit the matrix", "login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, } channel = self.make_request(b\"POST\",", "ApplicationService.NS_ALIASES: [], }, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def test_login_appservice_user(self)", "access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_wrong_as(self) -> None: \"\"\"Test", "the access token channel = 
self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200,", "= h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] = value oidc_session_cookie = cookies[\"oidc_session\"] macaroon", "/ 1000.0) params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\",", "to appear in the list after the normal # ones", "p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will have url-encoded the", "channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the username", "redirectUrl is correctly encoded in the service param - ie,", "server used in some tests CAS_SERVER = \"https://fake.test\" # just", "may not use this file except in compliance with the", "location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self) ->", "\"username_mapping_session=\" + session_id), # old versions of twisted don't do", "type of jwt.encode from bytes to str. 
result: Union[str, bytes]", "= channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url = location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") #", "\"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER, }", "] # This key's pubkey is used as the jwt_secret", "200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An unknown", "'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS =", "TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log in as", "302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri = location_headers[0]", "self.service.sender}, } channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token )", "without a valid # content-length header. (\"Content-Length\", str(len(content))), ], )", "this file except in compliance with the License. 
# You", "a call to the `/proxyValidate` endpoint of a CAS server,", "return d def test_username_picker(self) -> None: \"\"\"Test the happy path", "test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url: str) -> None:", "None: \"\"\"Test the happy path of a username picker flow.\"\"\"", "matrix login token to the login API, which gives us", "= self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self)", "tests.unittest import HomeserverTestCase, override_config, skip_unless try: import jwt HAS_JWT =", "self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If", "b\"200\", channel.result) # Since we're ratelimiting at 1 request/min, retry_after_ms", "http:// scheme because # FakeChannel.isSecure() returns False, so synapse will", "location_headers[0] cas_uri_path, cas_uri_query = cas_uri.split(\"?\", 1) # it should redirect", "gives us our # matrix access token, mxid, and device", "self.hs = self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, ) return self.hs def prepare(self,", "# # Licensed under the Apache License, Version 2.0 (the", "access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_no_token(self) -> None: \"\"\"Test", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "return self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def _get_value_from_macaroon(macaroon:", "def test_login_no_sub(self) -> None: channel = self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\",", "username to the username 
picker, which should serve a redirect", "servlets = [ login.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock:", "is HTML. self.assertEqual(channel.code, 200, channel.result) content_type_header_value = \"\" for header", "] ) # This key is used to sign tokens", "to generate real keys, but # good enough for tests!)", "import urllib.parse from typing import Any, Dict, List, Optional, Union", "-> str: prefix = key + \" = \" for", "so using http in the public_baseurl stops Synapse trying to", "= \"\" for header in channel.result.get(\"headers\", []): if header[0] ==", "bothered by the per-user # ratelimiter. for i in range(0,", "hard log out all of the user's sessions channel =", "url_parts = list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"})", "field for JWT is missing\") # The JWTPubKeyTestCase is a", "passes self.reactor.advance(24 * 3600) # ... 
and we should be", "synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer from synapse.types import", "self, ) -> None: self.register_user(\"kermit\", \"monkey\") # log in as", "0.17, \"burst_count\": 5}, } } ) def test_POST_ratelimiting_per_account_failed_attempts(self) -> None:", "log in as normal access_token = self.login(\"kermit\", \"monkey\") # we", "channel = self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200, channel.result) # parse the", "flow serves a redirect for the given redirect URL.\"\"\" cas_ticket_url", "True, \"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] = { \"sp_config\":", "`/proxyValidate` endpoint of a CAS server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This", "reading user ID from a custom subject claim.\"\"\" channel =", "\"monkey\") # we should now be able to make requests", "map\", ) session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url,", "Now try to hard logout this session channel = self.make_request(b\"POST\",", "mxid, and device id. login_token = params[2][1] chan = self.make_request(", "# test behaviour after deleting the expired device # #", "{ \"address\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the account", "but # good enough for tests!) 
jwt_privatekey = \"\\n\".join( [", "Dict[str, str] = {} for h in cookie_headers: key, value", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self) -> None: channel =", "json.dumps(body).encode(\"utf8\"), custom_headers=None, ) # test that the login fails with", "make requests without an access token channel = self.make_request(b\"GET\", TEST_URL)", "\"https://legit-site.com/\", \"https://other-site.com/\", ] } } ) def test_cas_redirect_whitelisted(self) -> None:", "test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\", \"<PASSWORD>\") for i in range(0, 6):", "jwt.encode from bytes to str. result: Union[bytes, str] = jwt.encode(payload,", "known IdP, redirect to it\"\"\" channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302,", "HTML channel = self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200, channel.result) # parse", "\"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\"}, \"password\": \"<PASSWORD>\", } channel", "SAML server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) +", "] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.base_url", "The JWTPubKeyTestCase is a complement to JWTTestCase where we instead", "401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device( self, access_token:", "def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) -> None: self.register_user(\"kermit\", \"monkey\") # log", "int(time.time()) channel = self.jwt_login({\"sub\": \"frog\", \"nbf\": now + 3600}) self.assertEqual(channel.result[\"code\"],", "config.get(\"jwt_config\") is None: config[\"jwt_config\"] = 
self.base_config return config def jwt_encode(self,", "location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will have url-encoded the", "now log in as a different device access_token_2 = self.login(\"kermit\",", "None) -> FakeChannel: \"\"\"Send a request to /_matrix/client/r0/login/sso/redirect ... possibly", "\"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\",", "None: \"\"\"Test that as users cannot login with wrong as", "= p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will have url-encoded", "import TestHtmlParser from tests.unittest import HomeserverTestCase, override_config, skip_unless try: import", "\"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"}) #", "self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def test_deactivated_user(self) -> None:", "or implied. 
# See the License for the specific language", "from tests.handlers.test_saml import has_saml2 from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from", "\"m.login.cas\", \"m.login.sso\", \"m.login.token\", \"m.login.password\", ] + [f[\"type\"] for f in", "password: str, device_id: str ) -> None: \"\"\"Perform the UI-Auth", "for caveat in macaroon.caveats: if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :] raise", "= [ login.register_servlets, ] # This key's pubkey is used", "self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def", "\"crypto_backend\": \"XMLSecurity\", }, } # default OIDC provider config[\"oidc_config\"] =", "RSA PRIVATE KEY-----\", ] ) def default_config(self) -> Dict[str, Any]:", "list after the normal # ones ADDITIONAL_LOGIN_FLOWS = [ {\"type\":", "in the public_baseurl stops Synapse trying to redirect to #", ") auth = { \"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 # \"identifier\":", "from raising first # # This is normally covered by", "that CAS will redirect to cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri =", "by this and validated using the pubkey. 
It is generated", "content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan = self.make_request( \"POST\", path=picker_url, content=content,", "[ {\"id\": \"cas\", \"name\": \"CAS\"}, {\"id\": \"saml\", \"name\": \"SAML\"}, {\"id\":", "cookies[\"username_mapping_session\"] # introspect the sso handler a bit to check", "BASE_URL = \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server used in", "redirect # to the completion page content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\")", "send a request to the completion page, which should 302", "self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log in as normal params = {", "params[2][1] # finally, submit the matrix login token to the", "\"\" for header in channel.result.get(\"headers\", []): if header[0] == b\"Content-Type\":", "= self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)], ) self.assertEqual(chan.code,", "values to 10000, but as we're overriding the entire #", "{\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL, params) return", "self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code, 200,", "self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"}, channel.result, ) auth = {", "-> None: \"\"\"Test validating the issuer claim.\"\"\" # A valid", "way to generate real keys, but # good enough for", "the query params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\", \"\"),", "= cas_uri.split(\"?\", 1) # it should redirect us to the", "} # make a login request with the bad device_id", "place that CAS will redirect to cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri", "channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + 
urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", )", "the above # private key placed in foo.key (jwt_privatekey). jwt_pubkey", "redirect us to the login page of the SAML server", "errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # fish the login", "class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets, lambda", "should still return a soft-logout self.reactor.advance(3600) channel = self.make_request(b\"GET\", TEST_URL,", "import unittest from tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml import has_saml2", "None: params = {\"type\": \"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\", LOGIN_URL, params)", "self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed: Token is missing the \"iss\"", "expired device # # we now log in as a", "= b\"/_matrix/client/r0/account/whoami\" # a (valid) url with some annoying characters", "in. %3D is =, %26 is &, %2B is +", "# Not providing an audience. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"],", "# whitelist this client URI so we redirect straight to", "a redirect for the given redirect URL.\"\"\" cas_ticket_url = (", "\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\":", "to the login page of the cas server self.assertEqual(cas_uri_path, CAS_SERVER", "b\"403\", channel.result) def test_login_appservice_no_token(self) -> None: \"\"\"Test that users must", "* 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) # Now, submit a username", "ticket. 
cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) # Get", "should give us some HTML channel = self.make_request(\"GET\", uri) self.assertEqual(channel.code,", "that it contains our redirect link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( {", "subject claim.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "issuer. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "import time import urllib.parse from typing import Any, Dict, List,", "6): self.register_user(\"kermit\" + str(i), \"monkey\") for i in range(0, 6):", "config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\": {\"display_name_template\": \"{{ user.displayname }}\"} } #", "so we redirect straight to it rather than # serving", "\"token\": login_token}, ) self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self)", "Dict[str, str] = {} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies) session_id = cookies[\"username_mapping_session\"]", "which should 302 to the client redirectUrl chan = self.make_request(", "\"user1\"}) # that should serve a confirmation page self.assertEqual(channel.code, 200,", "self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token", "assert cookie_headers cookies: Dict[str, str] = {} for h in", "200, channel.result) # parse the form to check it has", "expected_flow_types = [ \"m.login.cas\", 
\"m.login.sso\", \"m.login.token\", \"m.login.password\", ] + [f[\"type\"]", "self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"},", "\"m.id.user\", \"user\": \"fibble_wibble\"}, } channel = self.make_request( b\"POST\", LOGIN_URL, params,", "chan.headers.getRawHeaders(\"Location\") assert location_headers # ensure that the returned location matches", "[ login.register_servlets, register.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock)", "} channel = self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token, content={\"auth\":", "in as a deactivated account should error.\"\"\" redirect_url = \"https://legit-site.com/\"", "username mapping session # looks ok. username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions self.assertIn(", "is chosen, should redirect to the SAML server\"\"\" channel =", "test_soft_logout(self) -> None: self.register_user(\"kermit\", \"monkey\") # we shouldn't be able", "pymacaroons.Macaroon, key: str) -> str: prefix = key + \"", "params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_user_bot(self) -> None:", "\"/login\", content={\"type\": \"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"],", "# Test that the body isn't empty. 
self.assertTrue(len(channel.result[\"body\"]) > 0)", "should 302 to the client redirectUrl chan = self.make_request( \"GET\",", "of the SAML server self.assertEqual(saml_uri_path, SAML_SERVER) # the RelayState is", "bytes] = jwt.encode(payload, secret, self.jwt_algorithm) if isinstance(result, bytes): return result.decode(\"ascii\")", "\"\"\"Test the happy path of a username picker flow.\"\"\" #", "# CAS server used in some tests CAS_SERVER = \"https://fake.test\"", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # ... but if we", "class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the username picker flow of SSO", ") -> None: self.register_user(\"kermit\", \"monkey\") # log in as normal", "key, value = h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] = value oidc_session_cookie =", "LOGIN_URL, params) return channel def test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\", \"monkey\")", "ratelimiter. for i in range(0, 6): self.register_user(\"kermit\" + str(i), \"monkey\")", "[login.register_servlets] def default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"]", "from tests.unittest import HomeserverTestCase, override_config, skip_unless try: import jwt HAS_JWT", "# # This is normally covered by the default test", "= oidc_uri.split(\"?\", 1) # it should redirect us to the", "as # http://..., so using http in the public_baseurl stops", "time passes self.reactor.advance(24 * 3600) # ... and we should", "the account. 
self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id) ) ) #", "# to the completion page content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan", "self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") #", "verification failed\", ) AS_USER = \"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets =", "Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.service = ApplicationService( id=\"unique_identifier\",", "{ \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\" + str(i)},", "= self.make_request( \"POST\", path=picker_url, content=content, content_is_form=True, custom_headers=[ (\"Cookie\", \"username_mapping_session=\" +", "token is not yet valid (nbf)\", ) def test_login_no_sub(self) ->", "self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" config = self.default_config() config[\"public_baseurl\"] = ( config.get(\"public_baseurl\")", "None: channel = self.jwt_login({\"sub\": \"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "[ login.register_servlets, ] def default_config(self) -> Dict[str, Any]: config =", "the XMLSecurity backend to avoid relying on xmlsec1 \"crypto_backend\": \"XMLSecurity\",", "test_login_iss(self) -> None: \"\"\"Test validating the issuer claim.\"\"\" # A", "\"\"\"Perform the UI-Auth to delete a device\"\"\" channel = self.make_request(", "have set a cookie including the redirect url cookie_headers =", "# This is normally covered by the default test homeserver", "\"@kermit:test\") def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey)", "[] self.hs.config.captcha.enable_registration_captcha = 
False return self.hs @override_config( { \"rc_login\": {", "CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] = { \"sp_config\": { \"metadata\":", "with `openssl genrsa 512` (not a secure way to generate", "as the jwt_secret setting of synapse. Valid # tokens are", "serve the template. channel = self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302) location_headers", "urllib.parse from typing import Any, Dict, List, Optional, Union from", "super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_get_login_flows(self) -> None: \"\"\"GET /login", "channel.result) # Since we're ratelimiting at 1 request/min, retry_after_ms should", "= { \"sp_config\": { \"metadata\": {\"inline\": [TEST_SAML_METADATA]}, # use the", "This needs to be returned by an async function (as", "[] self.hs.config.registration.auto_join_rooms = [] self.hs.config.captcha.enable_registration_captcha = False return self.hs @override_config(", "= { \"enabled\": True, \"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\", } return", "an identity picker\"\"\" # first hit the redirect url, which", "= self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers", "HomeServer from synapse.types import create_requester from synapse.util import Clock from", "urlencode import pymacaroons from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import", "-> HomeServer: self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid =", "homeservers with multiple SSO providers enabled\"\"\" servlets = [ login.register_servlets,", "chosen, should redirect to the SAML server\"\"\" channel = self.make_request(", "return config def jwt_encode(self, payload: Dict[str, Any], secret: str =", "= 
self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result) expected_flow_types = [ \"m.login.cas\",", "channel = self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def", "b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) -> None:", "( config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] = { \"enabled\": True,", "} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC,", "at 1 request/min, retry_after_ms should be lower # than 1min.", "validation failed: Signature verification failed\", ) def test_login_jwt_expired(self) -> None:", "Since we're ratelimiting at 1 request/min, retry_after_ms should be lower", "the redirectUrl is correctly encoded in the service param -", "channel = self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self)", "\"ticket\"}) url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts) # Get Synapse", "access token channel = self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser() p.feed(channel.text_body) p.close() # ...", "the pubkey. 
It is generated # with `openssl genrsa 512`", "\"https://fake.test\" # just enough to tell pysaml2 where to redirect", "-pubout`, with the the above # private key placed in", "we redirect straight to it rather than # serving a", "= [] for link in p.links: path, query = link.split(\"?\",", "chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # ensure that the", "str = jwt_secret) -> str: # PyJWT 2.0.0 changed the", "def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"],", "audience. channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "user can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\":", "user to the redirect URL. \"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url", "= self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) # that", "channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri = location_headers[0] oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\",", "query.update({\"ticket\": \"ticket\"}) url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts) # Get", "JWTTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret = \"secret\"", "False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.another_service =", "the default OIDC provider channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" +", "= super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) 
config[\"oidc_config\"][\"user_mapping_provider\"]", "-> None: params = {\"type\": \"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\", LOGIN_URL,", "\"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE KEY-----\", ] ) def default_config(self)", "# time passes self.reactor.advance(24 * 3600) # ... and we", "* 3600) # ... and we should be soft-logouted channel", "so synapse will see the requested uri as # http://...,", "normally covered by the default test homeserver config # which", "= \"synapse\" # public_baseurl for some tests. It uses an", "{\"per_second\": 10000, \"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_address(self) ->", "= channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser() p.feed(channel.text_body) p.close()", "public key configured in synapse as \"jwt_secret\", and tokens #", "15 minutes away expected_expiry = self.clock.time_msec() + (15 * 60", "and device id. chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\",", "access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now", "an error template. self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT,", "flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\", \"name\": \"CAS\"}, {\"id\": \"saml\", \"name\": \"SAML\"},", "the client redirect url saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0]", "redirect URL. 
\"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\" url_parts", "a secure way to generate real keys, but # good", "\"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\"}, \"password\": \"<PASSWORD>\", } channel =", "+ urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod", "f in channel.json_body[\"flows\"]], expected_flow_types ) flows = {flow[\"type\"]: flow for", "@skip_unless(HAS_OIDC, \"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the username picker", "cookies[key] = value oidc_session_cookie = cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual(", "b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid", "def test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url: str) ->", "channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies: Dict[str, str] = {} for h", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: The token is not", "shouldn't be accepted by synapse. 
# Generated just like jwt_privatekey.", "(SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server used in some tests CAS_SERVER =", "jwt_algorithm = \"HS256\" base_config = { \"enabled\": True, \"secret\": jwt_secret,", "channel.result) def test_login_appservice_wrong_user(self) -> None: \"\"\"Test that non-as users cannot", "make a login request with the bad device_id channel =", "\"/\" + idp_prov endpoint += \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request(", "access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # ...", "= \"yolo\" * 512 body = { \"type\": \"m.login.password\", \"user\":", "self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code,", "the matrix login token to the login API, which gives", "validating the issuer claim.\"\"\" # A valid issuer. channel =", "import devices, login, logout, register from synapse.rest.client.account import WhoamiRestServlet from", "def test_login_jwt_not_before(self) -> None: now = int(time.time()) channel = self.jwt_login({\"sub\":", "in writing, software # distributed under the License is distributed", "[\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If CAS", "\"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor>", "to the redirect URL. \"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url =", "First login (to create the user). 
self._test_redirect(redirect_url) # Deactivate the", "= \"_synapse/client/login/sso/redirect/confirm\" config = self.default_config() config[\"public_baseurl\"] = ( config.get(\"public_baseurl\") or", "time import urllib.parse from typing import Any, Dict, List, Optional,", "= self.jwt_login({\"sub\": \"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", )", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}}) def", "should be soft-logouted channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401,", "to avoid relying on xmlsec1 \"crypto_backend\": \"XMLSecurity\", }, } #", "LOGIN_URL, params) self.assertEqual(channel.code, 200, channel.result) access_token = channel.json_body[\"access_token\"] device_id =", "0.17, \"burst_count\": 5}, # Prevent the account login ratelimiter from", "def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_secret) ->", "\"\"\" % { \"SAML_SERVER\": SAML_SERVER, } LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) # Since we're ratelimiting at 1 request/min,", "\"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self) -> None: \"\"\"If CAS is chosen,", "@staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str: prefix =", "= self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code, 200, channel.result) access_token = channel.json_body[\"access_token\"]", "token=\"some_token\", hostname=\"example.com\", 
sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\", \"exclusive\": False}", "self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If the client", "to JWTTestCase where we instead use # RSS256, with a", "Token is missing the \"iss\" claim', ) def test_login_iss_no_config(self) ->", "= jwt.encode(payload, secret, \"RS256\") if isinstance(result, bytes): return result.decode(\"ascii\") return", "= { \"config\": {\"display_name_template\": \"{{ user.displayname }}\"} } # whitelist", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid issuer. channel = self.jwt_login({\"sub\":", "\"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self) -> None:", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "accepted by synapse. # Generated just like jwt_privatekey. bad_privatekey =", "self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # fish the login token out", "redirect url, which should redirect to our idp picker channel", "License, Version 2.0 (the \"License\"); # you may not use", "generate real keys, but # good enough for tests!) 
jwt_privatekey", "set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"}, channel.result, ) auth = { \"type\":", "saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self)", "{\"type\": \"m.id.user\", \"user\": self.service.sender}, } channel = self.make_request( b\"POST\", LOGIN_URL,", "} ) def test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\", \"monkey\") for i", "self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False)", "= link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\") params = urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL])", "= {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\": {\"display_name_template\": \"{{ user.displayname", "Token is missing the \"aud\" claim', ) def test_login_aud_no_config(self) ->", "None: channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\")", "{\"client_whitelist\": [\"https://x\"]} return config def create_resource_dict(self) -> Dict[str, Resource]: d", "page before redirecting a user to the redirect URL. 
\"\"\"", "} def default_config(self) -> Dict[str, Any]: config = super().default_config() #", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}}) def test_login_aud(self)", "we instead use # RSS256, with a public key configured", "200, channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\", \"monkey\")", "\"\"\"Send a request to /_matrix/client/r0/login/sso/redirect ... possibly specifying an IDP", "used as the jwt_secret setting of synapse. Valid # tokens", "[]): if header[0] == b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) #", "\"@user1:test\") def test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An unknown IdP should cause", "header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that the body isn't empty. 
self.assertTrue(len(channel.result[\"body\"])", "server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self, idp_prov: Optional[str] = None) ->", "config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\": {\"display_name_template\": \"{{ user.displayname }}\"} }", "params properly, so we'll have to parse them params =", "\"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code, 200,", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" )", ") session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL)", "# good enough for tests!) jwt_privatekey = \"\\n\".join( [ \"-----BEGIN", "= location_headers[0] # hitting that picker should give us some", "the License for the specific language governing permissions and #", "failed: Invalid issuer\" ) # Not providing an issuer. 
channel", "\"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\",", "def test_get_login_flows(self) -> None: \"\"\"GET /login should return password and", "1) self.assertEqual(path, \"https://x\") # it will have url-encoded the params", "in synapse as \"jwt_secret\", and tokens # signed by the", "} channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"],", "\"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\"}, \"password\": \"<PASSWORD>\", }", "from the default subject claim.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"],", "{\"type\": \"m.id.user\", \"user\": user_id}, \"user\": user_id, \"password\": password, \"session\": channel.json_body[\"session\"],", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs =", "\"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "characters device_id = \"yolo\" * 512 body = { \"type\":", "\"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) -> None: self.register_user(\"kermit\", \"monkey\") #", "is deactivated they are served an error template. self.assertEqual(channel.code, 403)", "\"jwt_secret\", and tokens # signed by the private key. 
@skip_unless(HAS_JWT,", "the returned redirect uri login_token = params[2][1] # finally, submit", "custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)], ) self.assertEqual(chan.code, 302, chan.result) location_headers =", "json import time import urllib.parse from typing import Any, Dict,", "# log in as normal access_token = self.login(\"kermit\", \"monkey\") #", "will be a proper logout self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id) channel", "\"\"\"GET /login should return password and SSO flows\"\"\" channel =", "for each href returned_idps: List[str] = [] for link in", "\"failed_attempts\": {\"per_second\": 0.17, \"burst_count\": 5}, } } ) def test_POST_ratelimiting_per_account_failed_attempts(self)", "self.jwt_login({\"sub\": \"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "None: channel = self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" # public_baseurl for some tests. It uses", "% { \"SAML_SERVER\": SAML_SERVER, } LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL =", "location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers uri = location_headers[0] # hitting", "urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) -> None:", "302) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\":", "1 request/min, retry_after_ms should be lower # than 1min. 
self.assertTrue(retry_after_ms", "custom_headers=[ (\"Cookie\", \"username_mapping_session=\" + session_id), # old versions of twisted", "HAS_OIDC, \"Requires SAML2 and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "cookie_headers cookies: Dict[str, str] = {} for h in cookie_headers:", "that should serve a confirmation page self.assertEqual(channel.code, 200, channel.result) content_type_headers", "us our # matrix access token, mxid, and device id.", "the SSO login flow serves a confirmation page before redirecting", "= urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0] _, service_uri_query = service_uri.split(\"?\", 1)", "\"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\",", "{ \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\"}, \"password\": \"<PASSWORD>\",", "URL path, query = location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it", "the auth page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def", "account should error.\"\"\" redirect_url = \"https://legit-site.com/\" # First login (to", "\"\"\"Tests for homeservers with multiple SSO providers enabled\"\"\" servlets =", "SSO login\"\"\" servlets = [login.register_servlets] def default_config(self) -> Dict[str, Any]:", "to check that the username mapping session # looks ok.", "token, mxid, and device id. chan = self.make_request( \"POST\", \"/login\",", "login (to create the user). 
self._test_redirect(redirect_url) # Deactivate the account.", "\"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}}) def test_login_custom_sub(self) -> None: \"\"\"Test", "the return type of jwt.encode from bytes to str. result:", "jwt.encode(payload, secret, self.jwt_algorithm) if isinstance(result, bytes): return result.decode(\"ascii\") return result", "as well \"account\": {\"per_second\": 10000, \"burst_count\": 10000}, } } )", "saml_uri_path, saml_uri_query = saml_uri.split(\"?\", 1) # it should redirect us", "that the appservice bot can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params", "test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect should redirect to an identity picker\"\"\"", "channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200, channel.result) # time", "\"SAML_SERVER\": SAML_SERVER, } LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\" #", "give us some HTML channel = self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200,", "\"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\": {\"localpart_template\": \"{{", "KEY-----\", ] ) # Generated with `openssl rsa -in foo.key", "in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"],", "\"config\": {\"display_name_template\": \"{{ user.displayname }}\"} } # whitelist this client", "register.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:", "= urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) ->", "= \"https://fake.test\" # just 
enough to tell pysaml2 where to", "_make_sso_redirect_request(self, idp_prov: Optional[str] = None) -> FakeChannel: \"\"\"Send a request", "except ImportError: HAS_JWT = False # synapse server name: used", "redirect straight to it rather than # serving a confirmation", "= \"secret\" jwt_algorithm = \"HS256\" base_config = { \"enabled\": True,", "+ TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query params in", "channel.json_body[\"error\"], \"JWT validation failed: Signature verification failed\", ) def test_login_jwt_expired(self)", "cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel", "be able to make requests with the access token channel", "ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\",", "import pymacaroons from twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource", "= BASE_URL config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\":", "def _delete_device( self, access_token: str, user_id: str, password: str, device_id:", "# distributed under the License is distributed on an \"AS", "# rc_login dict here, we need to set this manually", "# Unless required by applicable law or agreed to in", "our redirect link self.assertEqual(len(p.links), 1) path, query = p.links[0].split(\"?\", 1)", "tests!) 
jwt_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\",", "jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_secret) -> str:", "failed: Token is missing the \"iss\" claim', ) def test_login_iss_no_config(self)", "channel = self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "= channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri = location_headers[0] oidc_uri_path, oidc_uri_query =", "d def test_get_login_flows(self) -> None: \"\"\"GET /login should return password", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri = location_headers[0] cas_uri_path, cas_uri_query = cas_uri.split(\"?\",", "\"OIDC\"}, ], ) def test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect should redirect", "url saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def", "provider channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\",", "WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:", "-> None: \"\"\"Test providing an issuer claim without requiring it", "Synapse to call the fake CAS and serve the template.", "class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, register.register_servlets, ] def make_homeserver(self,", "= ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\":", "= [] self.hs.config.registration.auto_join_rooms = 
[] self.hs.config.captcha.enable_registration_captcha = False return self.hs", "the username picker, which should serve a redirect # to", "\"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)], ) self.assertEqual(chan.code, 302, chan.result)", "self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self) -> None: params = {\"type\": \"org.matrix.login.jwt\"}", "sets these values to 10000, but as we're overriding the", "by the private key. @skip_unless(HAS_JWT, \"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets", "# than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0 +", "picker flow of SSO login\"\"\" servlets = [login.register_servlets] def default_config(self)", "self.reactor.advance(retry_after_ms / 1000.0) params = { \"type\": \"m.login.password\", \"identifier\": {\"type\":", "trying to redirect to # https://.... BASE_URL = \"http://%s/\" %", "None: \"\"\"If the client tries to pick an unknown IdP,", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device( self, access_token: str,", "validation failed: Signature verification failed\", ) AS_USER = \"as_user_alice\" class", "a token when using the appservice login method \"\"\" self.register_appservice_user(AS_USER,", "redirect uri login_token = params[2][1] # finally, submit the matrix", "self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard log out all", "login.register_servlets, logout.register_servlets, devices.register_servlets, lambda hs, http_server: WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self,", "self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"})", "the Apache 
License, Version 2.0 (the \"License\"); # you may", "%s caveat in macaroon\" % (key,)) class CASTestCase(unittest.HomeserverTestCase): servlets =", "\"session\": channel.json_body[\"session\"], } channel = self.make_request( b\"DELETE\", \"devices/\" + device_id,", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed:", "pick a known IdP, redirect to it\"\"\" channel = self._make_sso_redirect_request(\"oidc\")", "Union[str, bytes] = jwt.encode(payload, secret, self.jwt_algorithm) if isinstance(result, bytes): return", "Union[bytes, str] = jwt.encode(payload, secret, \"RS256\") if isinstance(result, bytes): return", "a request to the completion page, which should 302 to", "try to hard logout this session channel = self.make_request(b\"POST\", \"/logout\",", "annoying characters in. %3D is =, %26 is &, %2B", "p.feed(html) p.close() # there should be a link for each", "= self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self)", "302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri = location_headers[0]", "channel.result) expected_flow_types = [ \"m.login.cas\", \"m.login.sso\", \"m.login.token\", \"m.login.password\", ] +", "session_id, username_mapping_sessions, \"session id not found in map\", ) session", "# use the XMLSecurity backend to avoid relying on xmlsec1", "session_id = cookies[\"username_mapping_session\"] # introspect the sso handler a bit", ") channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"}) # that should", "for tests!) 
jwt_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\",", "urllib.parse import urlencode import pymacaroons from twisted.test.proto_helpers import MemoryReactor from", "MemoryReactor from twisted.web.resource import Resource import synapse.rest.admin from synapse.appservice import", "Create different users so we're sure not to be bothered", "str] = {} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies) session_id = cookies[\"username_mapping_session\"] #", "List, Optional, Union from unittest.mock import Mock from urllib.parse import", "# finally, submit the matrix login token to the login", "{\"type\": \"m.id.user\", \"user\": AS_USER}, } channel = self.make_request( b\"POST\", LOGIN_URL,", "# fish the login token out of the returned redirect", "[ \"-----BEGIN PUBLIC KEY-----\", \"<KEY>\", \"<KEY> \"-----END PUBLIC KEY-----\", ]", "True) # Now try to hard log out all of", "id. login_token = params[2][1] chan = self.make_request( \"POST\", \"/login\", content={\"type\":", "('q\" =+\"', '\"fö&=o\"')] # (possibly experimental) login flows we expect", "+ urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", ) self.assertEqual(channel.code, 302, channel.result) location_headers =", "the template. 
channel = self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302) location_headers =", "= self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, ) # test that", "( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy>", "self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) # that should", "servlets = [ login.register_servlets, ] # This key's pubkey is", "@override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}}) def test_login_aud(self) -> None: \"\"\"Test validating", "content_type_headers = channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser() p.feed(channel.text_body)", "account. 
self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id) ) ) # Request", "value oidc_session_cookie = cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"),", "session_id), # old versions of twisted don't do form-parsing without", "= False # synapse server name: used to populate public_baseurl", "query, keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") #", "@override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url:", "{\"sub\": \"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) # that should redirect", "self.assertEqual(params[2][0], \"loginToken\") # fish the login token out of the", "location_headers saml_uri = location_headers[0] saml_uri_path, saml_uri_query = saml_uri.split(\"?\", 1) #", "access token channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200, channel.result)", "well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, } } ) def", "header. 
(\"Content-Length\", str(len(content))), ], ) self.assertEqual(chan.code, 302, chan.result) location_headers =", "\"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT validation", "i in range(0, 6): params = { \"type\": \"m.login.password\", \"identifier\":", "the completion page, which should 302 to the client redirectUrl", "self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC, \"Requires SAML2 and", "= location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ... with a username_mapping_session cookie", "bytes to str. result: Union[str, bytes] = jwt.encode(payload, secret, self.jwt_algorithm)", "public_baseurl for some tests. It uses an http:// scheme because", "self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri =", "d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_username_picker(self) -> None:", "# default OIDC provider config[\"oidc_config\"] = TEST_OIDC_CONFIG # additional OIDC", "import Mock from urllib.parse import urlencode import pymacaroons from twisted.test.proto_helpers", "-> bytes: \"\"\"Return an example response payload from a call", "OIDC auth endpoint\"\"\" # pick the default OIDC provider channel", "page config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]} return config def create_resource_dict(self) ->", "test_login_no_sub(self) -> None: channel = self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "generated # with `openssl genrsa 512` (not a secure way", ") AS_USER = \"as_user_alice\" class 
AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets,", "import has_saml2 from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server import", "\"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) if i", "= { \"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\": \"m.id.user\",", "bytes): return result.decode(\"ascii\") return result def jwt_login(self, *args: Any) ->", "config[\"jwt_config\"] = { \"enabled\": True, \"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\", }", "Request the CAS ticket. cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url))", "IDP provider \"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\" if idp_prov is not", "self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon,", "self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard logout this session", "b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that the body", "{ \"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 # \"identifier\": {\"type\": \"m.id.user\", \"user\":", "requested redirect URL path, query = location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\")", "int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) # Since we're ratelimiting at", "import ApplicationService from synapse.rest.client import devices, login, logout, register from", "with a username_mapping_session cookie cookies: Dict[str, str] = {} channel.extract_cookies(cookies)", "user). 
self._test_redirect(redirect_url) # Deactivate the account. self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id, False,", "= self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid", "dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url =", "CAS server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) +", "real keys, but # good enough for tests!) jwt_privatekey =", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: The token", "token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\", \"exclusive\": False}", "tests. 
It uses an http:// scheme because # FakeChannel.isSecure() returns", "login.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:", "# just enough to tell pysaml2 where to redirect to", "under the License is distributed on an \"AS IS\" BASIS,", "{\"id\": \"saml\", \"name\": \"SAML\"}, {\"id\": \"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\": \"oidc\",", "claim', ) def test_login_aud_no_config(self) -> None: \"\"\"Test providing an audience", "\"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self) -> None:", "= chan.headers.getRawHeaders(\"Location\") assert location_headers # ensure that the returned location", "location_headers uri = location_headers[0] # hitting that picker should give", "urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0] _, service_uri_query = service_uri.split(\"?\", 1) service_uri_params", "401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # ... but if", "in this class html = channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser() p.feed(html)", "\"user\": \"kermit\"}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params)", "CAS will redirect to cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0]", "payload: Dict[str, Any], secret: str = jwt_secret) -> str: #", "= BASE_URL config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER, \"service_url\":", "be bothered by the per-user # ratelimiter. 
for i in", "to set this manually as well \"address\": {\"per_second\": 10000, \"burst_count\":", "urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\")", "default_config(self) -> Dict[str, Any]: config = super().default_config() # If jwt_config", "\"user\": user_id, \"password\": password, \"session\": channel.json_body[\"session\"], } channel = self.make_request(", "to pick an unknown IdP, return a 404\"\"\" channel =", "PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END", "\"\"\"/login/sso/redirect should redirect to an identity picker\"\"\" # first hit", "(valid) url with some annoying characters in. %3D is =,", "tries to pick an unknown IdP, return a 404\"\"\" channel", "elsewhere in this class html = channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser()", "channel = self.make_request(\"GET\", cas_ticket_url) # Test that the response is", "are signed by this and validated using the pubkey. It", "time should be about 15 minutes away expected_expiry = self.clock.time_msec()", "Synapse trying to redirect to # https://.... 
BASE_URL = \"http://%s/\"", "(15 * 60 * 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) # Now,", "the expired token should still return a soft-logout self.reactor.advance(3600) channel", "\"loginToken\") # fish the login token out of the returned", "self.assertEqual(saml_uri_path, SAML_SERVER) # the RelayState is used to carry the", "self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # # test", "Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid", "# ratelimiter. for i in range(0, 6): self.register_user(\"kermit\" + str(i),", "None: \"\"\"Test validating the audience claim.\"\"\" # A valid audience.", "placed in foo.key (jwt_privatekey). jwt_pubkey = \"\\n\".join( [ \"-----BEGIN PUBLIC", "b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self) -> None: self.register_user(\"mickey\", \"<PASSWORD>\") # create", "chan.headers.getRawHeaders(\"Location\") assert location_headers # send a request to the completion", "will see the requested uri as # http://..., so using", "} } ) def test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\", \"monkey\") for", "identity picker\"\"\" # first hit the redirect url, which should", "SAML server self.assertEqual(saml_uri_path, SAML_SERVER) # the RelayState is used to", "the CAS ticket. cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) )", "issuer. 
channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # ensure that the returned", "= self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "None: \"\"\"Test reading user ID from the default subject claim.\"\"\"", "= {flow[\"type\"]: flow for flow in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [", "10000, \"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_account(self) -> None:", "def test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An unknown IdP should cause a", "def test_login_custom_sub(self) -> None: \"\"\"Test reading user ID from a", "a complement to JWTTestCase where we instead use # RSS256,", "self.assertCountEqual( [f[\"type\"] for f in channel.json_body[\"flows\"]], expected_flow_types ) flows =", "{ \"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\", ] } } ) def", "logout, register from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree", "\"session\"}, channel.result, ) auth = { \"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665", "self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "redirect_url = \"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\":", "idp_prov is not None: endpoint += \"/\" + idp_prov endpoint", "for h in cookie_headers: 
key, value = h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key]", "JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}}) def test_login_iss(self) -> None: \"\"\"Test", "location_headers[0] saml_uri_path, saml_uri_query = saml_uri.split(\"?\", 1) # it should redirect", "\"burst_count\": 5}, # Prevent the address login ratelimiter from raising", "xmlsec1 \"crypto_backend\": \"XMLSecurity\", }, } # default OIDC provider config[\"oidc_config\"]", "the response is HTML. self.assertEqual(channel.code, 200, channel.result) content_type_header_value = \"\"", "sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS:", "the per-user # ratelimiter. for i in range(0, 6): self.register_user(\"kermit\"", "matrix access token, mxid, and device id. chan = self.make_request(", "self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri =", "login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"}, } channel = self.make_request(", "issuer claim.\"\"\" # A valid issuer. 
channel = self.jwt_login({\"sub\": \"kermit\",", "maxsplit=1) cookies[key] = value oidc_session_cookie = cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie)", "import HAS_OIDC from tests.handlers.test_saml import has_saml2 from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT,", "channel.json_body[\"session\"], } channel = self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token,", "config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", }", "self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self)", "matrix access token, mxid, and device id. login_token = params[2][1]", "302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri = location_headers[0]", "b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature", "looks ok. 
username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id, username_mapping_sessions, \"session id", "url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies: Dict[str, str] =", "</cas:serviceResponse> \"\"\" % cas_user_id ).encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect =", "\"m.id.user\", \"user\": self.service.sender}, } channel = self.make_request( b\"POST\", LOGIN_URL, params,", "location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # send a request to", ") self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None:", "PRIVATE KEY-----\", ] ) # Generated with `openssl rsa -in", "picker channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\")", "cannot login with wrong as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params =", "d.update(build_synapse_client_resource_tree(self.hs)) return d def test_get_login_flows(self) -> None: \"\"\"GET /login should", "from synapse.server import HomeServer from synapse.types import create_requester from synapse.util", "\"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\":", "str. result: Union[bytes, str] = jwt.encode(payload, secret, \"RS256\") if isinstance(result,", "it will have url-encoded the params properly, so we'll have", "by the per-user # ratelimiter. 
for i in range(0, 6):", "True, \"secret\": jwt_secret, \"algorithm\": jwt_algorithm, } def default_config(self) -> Dict[str,", "cas_ticket_url) # Because the user is deactivated they are served", "access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device(", "should redirect to the username picker self.assertEqual(channel.code, 302, channel.result) location_headers", "MemoryReactor, clock: Clock) -> HomeServer: self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path =", "= {\"client_whitelist\": [\"https://x\"]} return config def create_resource_dict(self) -> Dict[str, Resource]:", "urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If SAML is", "the OIDC auth endpoint\"\"\" # pick the default OIDC provider", "flows we expect to appear in the list after the", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature has", "an IDP provider \"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\" if idp_prov is", "{ \"enabled\": True, \"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] =", "login.register_servlets, ] def default_config(self) -> Dict[str, Any]: config = super().default_config()", "ANY KIND, either express or implied. 
# See the License", "= None) -> FakeChannel: \"\"\"Send a request to /_matrix/client/r0/login/sso/redirect ...", "macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel =", "def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:", "users cannot login with wrong as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params", "the requested redirect URL path, query = location_headers[0].split(\"?\", 1) self.assertEqual(path,", "This is normally covered by the default test homeserver config", "the License. # You may obtain a copy of the", "= [ { \"idp_id\": \"idp1\", \"idp_name\": \"IDP1\", \"discover\": False, \"issuer\":", "synapse.appservice import ApplicationService from synapse.rest.client import devices, login, logout, register", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed: Token is", "test_login_default_sub(self) -> None: \"\"\"Test reading user ID from the default", "import WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer", "used to sign tokens that shouldn't be accepted by synapse.", "default test homeserver config # which sets these values to", "{ \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"}, } channel", "# See the License for the specific language governing permissions", "Because the user is deactivated they are served an error", "template. 
channel = self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302) location_headers = channel.headers.getRawHeaders(\"Location\")", "channel.headers.getRawHeaders(\"Location\") assert location_headers uri = location_headers[0] # hitting that picker", "\"monkey\") channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\")", "self.assertEqual(channel.json_body[\"soft_logout\"], True) # ... but if we delete that device,", "\"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def", "Test that the response is HTML. self.assertEqual(channel.code, 200, channel.result) content_type_header_value", "channel.result, ) auth = { \"type\": \"m.login.password\", # https://github.com/matrix-org/synapse/issues/5665 #", "chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An unknown IdP", "to hard log out all of the user's sessions channel", "# FakeChannel.isSecure() returns False, so synapse will see the requested", "idp_prov endpoint += \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\", endpoint,", "params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self) -> None:", "the \"aud\" claim', ) def test_login_aud_no_config(self) -> None: \"\"\"Test providing", "the fake CAS and serve the template. 
channel = self.make_request(\"GET\",", "-> Dict[str, Any]: config = super().default_config() config[\"jwt_config\"] = { \"enabled\":", "cas_uri = location_headers[0] cas_uri_path, cas_uri_query = cas_uri.split(\"?\", 1) # it", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self) -> None: self.register_user(\"kermit\",", "3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation", "# http://..., so using http in the public_baseurl stops Synapse", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed: Token is missing the", "= [ login.register_servlets, ] def default_config(self) -> Dict[str, Any]: config", "channel def test_login_jwt_valid(self) -> None: channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"],", "the appservice bot can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params =", "self.assertEqual(len(p.links), 1) path, query = p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") #", "self.assertEqual(channel.code, 302) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url)", "== 5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"],", "str: # PyJWT 2.0.0 changed the return type of jwt.encode", "] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets,", "i == 5: self.assertEqual(channel.result[\"code\"], b\"429\", 
channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else:", "to 10000, but as we're overriding the entire # rc_login", "Any, Dict, List, Optional, Union from unittest.mock import Mock from", "# Test that the response is HTML. self.assertEqual(channel.code, 200, channel.result)", "failed: Signature verification failed\", ) def test_login_jwt_expired(self) -> None: channel", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\":", ") -> None: \"\"\"Perform the UI-Auth to delete a device\"\"\"", "in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for f in channel.json_body[\"flows\"]], expected_flow_types )", "self.assertEqual(channel.code, 401, channel.result) # check it's a UI-Auth fail self.assertEqual(", "be returned by an async function (as opposed to set", "validation failed: Signature has expired\" ) def test_login_jwt_not_before(self) -> None:", "[ { \"idp_id\": \"idp1\", \"idp_name\": \"IDP1\", \"discover\": False, \"issuer\": \"https://issuer1\",", "mxid, and device id. 
chan = self.make_request( \"POST\", \"/login\", content={\"type\":", "self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self,", "self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets", "@override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) -> None: self.register_user(\"kermit\", \"monkey\")", "the account login ratelimiter from raising first # # This", "%26 is &, %2B is + TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"'", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "\"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] = { \"sp_config\": { \"metadata\": {\"inline\": [TEST_SAML_METADATA]},", "fails with the correct error code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\")", "# than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0) params", "test behaviour after deleting the expired device # # we", "jwt.encode from bytes to str. 
result: Union[str, bytes] = jwt.encode(payload,", "assert location_headers oidc_uri = location_headers[0] oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\", 1)", "page self.assertEqual(channel.code, 200, channel.result) content_type_headers = channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\"))", "cookie including the redirect url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers", "claim', ) def test_login_iss_no_config(self) -> None: \"\"\"Test providing an issuer", "the audience claim.\"\"\" # A valid audience. channel = self.jwt_login({\"sub\":", "test_login_custom_sub(self) -> None: \"\"\"Test reading user ID from a custom", "jwt_pubkey = \"\\n\".join( [ \"-----BEGIN PUBLIC KEY-----\", \"<KEY>\", \"<KEY> \"-----END", "writing, software # distributed under the License is distributed on", "\"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code, 302, channel.result)", "# First login (to create the user). 
self._test_redirect(redirect_url) # Deactivate", "saml_uri.split(\"?\", 1) # it should redirect us to the login", "access_token: str, user_id: str, password: str, device_id: str ) ->", "we're overriding the entire # rc_login dict here, we need", "redirect to it\"\"\" channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result) location_headers", "access_token=access_token, content={\"auth\": auth}, ) self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\": \"24h\"}) def", "including the redirect url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies:", "cookie cookies: Dict[str, str] = {} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies) session_id", "submit a username to the username picker, which should serve", "value) because the corresponding Synapse code awaits on it. \"\"\"", "# A valid issuer. channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"})", "tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml import has_saml2 from tests.rest.client.utils import", "self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # send", "to redirect to # https://.... BASE_URL = \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,)", "picker_url = location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ... with a username_mapping_session", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature has expired\" )", "\"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] = { \"sp_config\": {", "form-parsing without a valid # content-length header. 
(\"Content-Length\", str(len(content))), ],", "self.assertIn( session_id, username_mapping_sessions, \"session id not found in map\", )", "our redirect link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\": { \"client_whitelist\":", "= [login.register_servlets] def default_config(self) -> Dict[str, Any]: config = super().default_config()", "\"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE", "= username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the", "\"M_INVALID_PARAM\") @skip_unless(has_saml2 and HAS_OIDC, \"Requires SAML2 and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase):", "test_login_aud(self) -> None: \"\"\"Test validating the audience claim.\"\"\" # A", "channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\", cookies) session_id = cookies[\"username_mapping_session\"] # introspect the sso", "a (valid) url with some annoying characters in. %3D is", "\"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) # Get Synapse to call the", "that should redirect to the username picker self.assertEqual(channel.code, 302, channel.result)", "use the XMLSecurity backend to avoid relying on xmlsec1 \"crypto_backend\":", "with some annoying characters in. 
%3D is =, %26 is", "device_id channel = self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, ) #", "location_headers oidc_uri = location_headers[0] oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\", 1) #", "query = location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will have", "failed: Signature verification failed\", ) AS_USER = \"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase):", "a device\"\"\" channel = self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token", "return self.hs def test_login_appservice_user(self) -> None: \"\"\"Test that an appservice", "-> None: \"\"\"An unknown IdP should cause a 400\"\"\" channel", "'\"fö&=o\"')] # (possibly experimental) login flows we expect to appear", "path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)], ) self.assertEqual(chan.code, 302, chan.result) location_headers", "None: self._test_redirect(\"https://example.com/_matrix/static/client/login\") def _test_redirect(self, redirect_url: str) -> None: \"\"\"Tests that", "\"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA PRIVATE KEY-----\",", "\"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\" + str(i)}, \"password\": \"<PASSWORD>\", }", "username picker flow of SSO login\"\"\" servlets = [login.register_servlets] def", "config[\"public_baseurl\"] = ( config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] = {", "<cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> 
</cas:authenticationSuccess> </cas:serviceResponse> \"\"\" % cas_user_id ).encode(\"utf-8\")", "\"burst_count\": 10000}, \"failed_attempts\": {\"per_second\": 0.17, \"burst_count\": 5}, } } )", "per-user # ratelimiter. for i in range(0, 6): self.register_user(\"kermit\" +", "\"m.id.user\", \"user\": user_id}, \"user\": user_id, \"password\": password, \"session\": channel.json_body[\"session\"], }", "TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If SAML is chosen, should", "good enough for tests!) jwt_privatekey = \"\\n\".join( [ \"-----BEGIN RSA", "picker flow.\"\"\" # do the start of the login flow", "... possibly specifying an IDP provider \"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\"", "None: \"\"\"If CAS is chosen, should redirect to the CAS", "b\"200\", channel.result) def test_login_appservice_wrong_user(self) -> None: \"\"\"Test that non-as users", "check that the redirectUrl is correctly encoded in the service", "\"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], )", "\"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key:", "test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An unknown IdP should cause a 400\"\"\"", "synapse as \"jwt_secret\", and tokens # signed by the private", "claim.\"\"\" # A valid issuer. 
channel = self.jwt_login({\"sub\": \"kermit\", \"iss\":", "of the cas server self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\") # check", "default OIDC provider channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)", "xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse>", "the issuer claim.\"\"\" # A valid issuer. channel = self.jwt_login({\"sub\":", "\"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}})", "unknown IdP should cause a 400\"\"\" channel = self.make_request( \"GET\",", "header[0] == b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that", "create_requester from synapse.util import Clock from tests import unittest from", "\"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\": \"test-client-secret\", \"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\":", "self.setup_test_homeserver() self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms = []", "= int(time.time()) channel = self.jwt_login({\"sub\": \"frog\", \"nbf\": now + 3600})", "foo.key (jwt_privatekey). 
jwt_pubkey = \"\\n\".join( [ \"-----BEGIN PUBLIC KEY-----\", \"<KEY>\",", "the default test homeserver config # which sets these values", "-> None: \"\"\"Test the happy path of a username picker", "a login request with the bad device_id channel = self.make_request(", "SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\">", "= { \"enabled\": True, \"secret\": jwt_secret, \"algorithm\": jwt_algorithm, } def", "url-encoded the params properly, so we'll have to parse them", "p = TestHtmlParser() p.feed(channel.text_body) p.close() # ... which should contain", "= {\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL, params)", "path of a username picker flow.\"\"\" # do the start", "SSO login flow serves a confirmation page before redirecting a", "-> str: # PyJWT 2.0.0 changed the return type of", "400\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400, channel.result)", ") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_no_token(self) -> None: \"\"\"Test that", "redirect to SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\">", "\"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the expiry time should be about", "code awaits on it. 
\"\"\" return ( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'>", "provider config[\"oidc_config\"] = TEST_OIDC_CONFIG # additional OIDC providers config[\"oidc_providers\"] =", "CAS_SERVER, } cas_user_id = \"username\" self.user_id = \"@%s:test\" % cas_user_id", "without requiring it in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\",", "# Request the CAS ticket. cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" %", "{\"id\": \"cas\", \"name\": \"CAS\"}, {\"id\": \"saml\", \"name\": \"SAML\"}, {\"id\": \"oidc-idp1\",", "private key. @skip_unless(HAS_JWT, \"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets = [", "our # matrix access token, mxid, and device id. chan", "import FakeChannel from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import HomeserverTestCase,", "out of the returned redirect uri login_token = params[2][1] #", "\"kermit\"}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code,", "from tests.test_utils.html_parsers import TestHtmlParser from tests.unittest import HomeserverTestCase, override_config, skip_unless", "channel = self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) #", "to tell pysaml2 where to redirect to SAML_SERVER = \"https://test.saml.server/idp/sso\"", "if isinstance(result, bytes): return result.decode(\"ascii\") return result def jwt_login(self, *args:", ":] raise ValueError(\"No %s caveat in macaroon\" % (key,)) class", "PyJWT 2.0.0 changed the return type of jwt.encode from bytes", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}}) def test_login_aud(self) ->", "= self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], 
b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "assert location_headers picker_url = location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ... with", "homeserver config # which sets these values to 10000, but", "is missing the \"aud\" claim', ) def test_login_aud_no_config(self) -> None:", "\"scopes\": [\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": {", "return config def create_resource_dict(self) -> Dict[str, Resource]: d = super().create_resource_dict()", "channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"] # we should now be able", "but as we're overriding the entire # rc_login dict here,", "self.assertEqual(channel.code, 200, channel.result) # time passes self.reactor.advance(24 * 3600) #", "IdP, return a 404\"\"\" channel = self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result)", "\"idp1\", \"idp_name\": \"IDP1\", \"discover\": False, \"issuer\": \"https://issuer1\", \"client_id\": \"test-client-id\", \"client_secret\":", "b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\":", "uri) self.assertEqual(channel.code, 200, channel.result) # parse the form to check", "def test_login_jwt_valid(self) -> None: channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "Any], secret: str = jwt_secret) -> str: # PyJWT 2.0.0", "shorthand=False, ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers", "content_is_form=True, custom_headers=[ (\"Cookie\", 
\"username_mapping_session=\" + session_id), # old versions of", "\"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "invalid audience. channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\",", "bot can use /login\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\":", "device_id longer than 512 characters device_id = \"yolo\" * 512", "True) # ... but if we delete that device, it", "\"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}})", "as \"jwt_secret\", and tokens # signed by the private key.", "test_login_via_oidc(self) -> None: \"\"\"If OIDC is chosen, should redirect to", "\"algorithm\": jwt_algorithm, } def default_config(self) -> Dict[str, Any]: config =", "2.0.0 changed the return type of jwt.encode from bytes to", "\"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config,", "SAML_SERVER, } LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\" # a", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}}) def", "= self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200, channel.result) # time passes", "given redirect URL.\"\"\" cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) )", "\"RS256\", } return config 
def jwt_encode(self, payload: Dict[str, Any], secret:", "self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log in as normal", "synapse will see the requested uri as # http://..., so", "... but if we delete that device, it will be", "}, } ] return config def create_resource_dict(self) -> Dict[str, Resource]:", "self.hs = self.setup_test_homeserver() self.service = ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\",", "\"@kermit:test\") def test_login_jwt_valid_unregistered(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"],", "} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code, 200, channel.result) access_token", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self) -> None: channel = self.jwt_login({\"sub\":", ") self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_wrong_user(self) -> None: \"\"\"Test that", "\"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid", "1) # it should redirect us to the auth page", "in as normal params = { \"type\": \"m.login.password\", \"identifier\": {\"type\":", ") self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_user_bot(self) -> None: \"\"\"Test that", "twisted don't do form-parsing without a valid # content-length header.", "channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri = location_headers[0] saml_uri_path, saml_uri_query = saml_uri.split(\"?\",", "secret, self.jwt_algorithm) if isinstance(result, bytes): return result.decode(\"ascii\") return result def", 
"\"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400, channel.result) def test_client_idp_redirect_to_unknown(self) -> None: \"\"\"If", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}})", "encoded in the service param - ie, the # place", "saml_uri_query = saml_uri.split(\"?\", 1) # it should redirect us to", "@override_config( { \"rc_login\": { # Prevent the address login ratelimiter", "Not providing an audience. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\",", "self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self) ->", "id. chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\": login_token},", "in foo.key (jwt_privatekey). jwt_pubkey = \"\\n\".join( [ \"-----BEGIN PUBLIC KEY-----\",", "# public_baseurl for some tests. 
It uses an http:// scheme", "] return config def create_resource_dict(self) -> Dict[str, Resource]: d =", "redirect to the username picker self.assertEqual(channel.code, 302, channel.result) location_headers =", "\"user\": AS_USER}, } channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "} cas_user_id = \"username\" self.user_id = \"@%s:test\" % cas_user_id async", "config[\"jwt_config\"] = self.base_config return config def jwt_encode(self, payload: Dict[str, Any],", "and HAS_OIDC, \"Requires SAML2 and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for", "\"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\": {\"localpart_template\": \"{{ user.sub }}\"} }, }", "for homeservers with multiple SSO providers enabled\"\"\" servlets = [", "that shouldn't be accepted by synapse. # Generated just like", "of the returned redirect uri login_token = params[2][1] # finally,", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature verification failed\", )", "content_type_header_value = header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that the body isn't", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_valid_unregistered(self) -> None: channel =", "tokens are signed by this and validated using the pubkey.", "as we're overriding the entire # rc_login dict here, we", "login_token}, ) self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self) ->", "%2B is + TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query", "{\"sub\": \"user1\"}) # that should serve a confirmation page self.assertEqual(channel.code,", "= ( 
\"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) # Get Synapse to", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "= [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret = \"secret\" jwt_algorithm =", "\"\"\"Test validating the audience claim.\"\"\" # A valid audience. channel", "to the auth page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT)", "endpoint = \"/_matrix/client/r0/login/sso/redirect\" if idp_prov is not None: endpoint +=", "# ... which should contain our redirect link self.assertEqual(len(p.links), 1)", "the params properly, so we'll have to parse them params", "channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "{ \"config\": {\"display_name_template\": \"{{ user.displayname }}\"} } # whitelist this", "to hard logout this session channel = self.make_request(b\"POST\", \"/logout\", access_token=access_token)", "channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" ) # Not providing", "endpoint += \"?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\",", "set as the mock's return value) because the corresponding Synapse", "servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets, lambda hs, http_server:", "log in as normal params = { \"type\": \"m.login.password\", \"identifier\":", "claim.\"\"\" # A valid audience. 
channel = self.jwt_login({\"sub\": \"kermit\", \"aud\":", "payload: Dict[str, Any], secret: str = jwt_privatekey) -> str: #", "test_login_jwt_expired(self) -> None: channel = self.jwt_login({\"sub\": \"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"],", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "\"name\": \"SAML\"}, {\"id\": \"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\": \"oidc\", \"name\": \"OIDC\"},", "page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ... and", "location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri = location_headers[0] oidc_uri_path, oidc_uri_query", "channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\",", "TestHtmlParser() p.feed(html) p.close() # there should be a link for", "using the appservice login method \"\"\" self.register_appservice_user(AS_USER, self.service.token) params =", "LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets, lambda hs,", "\"rc_login\": { # Prevent the address login ratelimiter from raising", "-> None: channel = self.jwt_login({\"sub\": \"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\",", "{\"type\": \"m.id.user\", \"user\": AS_USER}, } channel = self.make_request(b\"POST\", LOGIN_URL, params)", "\"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\" + str(i)}, \"password\":", "servlets = [ login.register_servlets, ] def default_config(self) -> Dict[str, Any]:", "params[2][1] chan = self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\": login_token},", "needs to be returned by an async function (as opposed", "= channel.headers.getRawHeaders(\"Location\") 
assert location_headers uri = location_headers[0] # hitting that", "should be lower # than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms", "self.user_id = \"@%s:test\" % cas_user_id async def get_raw(uri: str, args:", "\"username_mapping_session=\" + session_id)], ) self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\")", "user ID from a custom subject claim.\"\"\" channel = self.jwt_login({\"username\":", "in cookie_headers: key, value = h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] = value", "after deleting the expired device # # we now log", "[TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self) ->", "self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration = True self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms", "login with the as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = {", "Generated just like jwt_privatekey. 
bad_privatekey = \"\\n\".join( [ \"-----BEGIN RSA", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field for JWT is missing\") #", "= value oidc_session_cookie = cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon,", "\"identifier\": {\"type\": \"m.id.user\", \"user\": user_id}, \"user\": user_id, \"password\": password, \"session\":", "to set this manually as well \"account\": {\"per_second\": 10000, \"burst_count\":", "failed: The token is not yet valid (nbf)\", ) def", "self.clock.time_msec() + (15 * 60 * 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000)", "ImportError: HAS_JWT = False # synapse server name: used to", "expected_expiry, tolerance=1000) # Now, submit a username to the username", "def test_login_appservice_wrong_as(self) -> None: \"\"\"Test that as users cannot login", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}}) def", "tests import unittest from tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml import", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self) ->", "_, service_uri_query = service_uri.split(\"?\", 1) service_uri_params = urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL)", "= self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") 
assert location_headers", "channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\": { \"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\", ]", "rsa -in foo.key -pubout`, with the the above # private", "setting of synapse. Valid # tokens are signed by this", "< 6000) self.reactor.advance(retry_after_ms / 1000.0) params = { \"type\": \"m.login.password\",", "access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self) -> None: self.register_user(\"mickey\", \"<PASSWORD>\")", "in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"],", "id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\", \"exclusive\":", "(key,)) class CASTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] def make_homeserver(self,", "self.reactor.advance(retry_after_ms / 1000.0 + 1.0) params = { \"type\": \"m.login.password\",", "the SAML server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)", "Get Synapse to call the fake CAS and serve the", "/_matrix/client/r0/login/sso/redirect ... possibly specifying an IDP provider \"\"\" endpoint =", "\"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\" + str(i)}, \"password\": \"<PASSWORD>\",", "= self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers", "valid issuer. 
channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url = location_headers[0] self.assertEqual(picker_url, \"/_synapse/client/pick_username/account_details\") # ...", "\"JWT validation failed: Signature verification failed\", ) def test_login_jwt_expired(self) ->", ") def test_login_jwt_not_before(self) -> None: now = int(time.time()) channel =", "specific language governing permissions and # limitations under the License.", "try to hard log out all of the user's sessions", "ValueError(\"No %s caveat in macaroon\" % (key,)) class CASTestCase(unittest.HomeserverTestCase): servlets", "HomeServer) -> None: self.deactivate_account_handler = hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) -> None:", "= TEST_OIDC_CONFIG # additional OIDC providers config[\"oidc_providers\"] = [ {", "HAS_OIDC from tests.handlers.test_saml import has_saml2 from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG", "channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "def default_config(self) -> Dict[str, Any]: config = super().default_config() # If", "test_login_appservice_user(self) -> None: \"\"\"Test that an appservice user can use", "path=picker_url, content=content, content_is_form=True, custom_headers=[ (\"Cookie\", \"username_mapping_session=\" + session_id), # old", "b\"DELETE\", \"devices/\" + device_id, access_token=access_token, content={\"auth\": auth}, ) self.assertEqual(channel.code, 200,", "html = channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser() p.feed(html) p.close() # there", "TEST_URL, access_token=access_token) self.assertEqual(channel.code, 200, channel.result) # time passes self.reactor.advance(24 *", "RSA PRIVATE KEY-----\", ] ) # Generated 
with `openssl rsa", ") # Not providing an issuer. channel = self.jwt_login({\"sub\": \"kermit\"})", "for f in channel.json_body[\"flows\"]], expected_flow_types ) flows = {flow[\"type\"]: flow", "\"rc_login\": { \"address\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the", "\"\"\"If OIDC is chosen, should redirect to the OIDC auth", "validation failed: The token is not yet valid (nbf)\", )", ") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri", "self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # ... but if we delete", "content={\"type\": \"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\")", "self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code, 200, channel.result) access_token = channel.json_body[\"access_token\"] device_id", "channel.json_body[\"error\"], 'JWT validation failed: Token is missing the \"iss\" claim',", "jwt_privatekey. bad_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\",", "# you may not use this file except in compliance", "serves a confirmation page before redirecting a user to the", "pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel = self.helper.complete_oidc_auth(oidc_uri, cookies,", "corresponding Synapse code awaits on it. 
\"\"\" return ( \"\"\"", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self) -> None: channel", "[], }, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def test_login_appservice_user(self) ->", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid audience. channel =", "\"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\",", "valid (nbf)\", ) def test_login_no_sub(self) -> None: channel = self.jwt_login({\"username\":", "\"\"\"Test providing an issuer claim without requiring it in the", "it will be a proper logout self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id)", "self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\",", "retry_after_ms should be lower # than 1min. self.assertTrue(retry_after_ms < 6000)", "self.default_config() config[\"public_baseurl\"] = ( config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] =", "+ 3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT", "# pick the default OIDC provider channel = self.make_request( \"GET\",", "from a call to the `/proxyValidate` endpoint of a CAS", "\"&idp=saml\", ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers", "\"/_synapse/client/pick_username/account_details\") # ... with a username_mapping_session cookie cookies: Dict[str, str]", "p.close() # ... 
which should contain our redirect link self.assertEqual(len(p.links),", "of the login flow channel = self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\":", "# matrix access token, mxid, and device id. login_token =", "urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\")", "config=config, proxied_http_client=mocked_http_client, ) return self.hs def prepare(self, reactor: MemoryReactor, clock:", "Invalid audience\" ) def test_login_default_sub(self) -> None: \"\"\"Test reading user", "-> None: \"\"\"Test that as users cannot login with wrong", "token channel = self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\")", "\"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self) -> None:", "as the mock's return value) because the corresponding Synapse code", "This key's pubkey is used as the jwt_secret setting of", "\"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "\"devices/\" + device_id, access_token=access_token, content={\"auth\": auth}, ) self.assertEqual(channel.code, 200, channel.result)", "request/min, retry_after_ms should be lower # than 1min. self.assertTrue(retry_after_ms <", "def test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\", \"monkey\") for i in range(0,", "of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ... 
and should", "test_login_appservice_no_token(self) -> None: \"\"\"Test that users must provide a token", "default subject claim.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "= {\"type\": \"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\",", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "username picker self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers", "as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, } } )", "\"kermit\" + str(i)}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL,", "= self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log", "# test that the login fails with the correct error", "clock: Clock) -> HomeServer: self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\"", "an audience without requiring it in the configuration.\"\"\" channel =", "str = jwt_privatekey) -> str: # PyJWT 2.0.0 changed the", "def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str: prefix = key", "{\"localpart_template\": \"{{ user.sub }}\"} }, } ] return config def", "login page of the SAML server self.assertEqual(saml_uri_path, SAML_SERVER) # the", "-> None: \"\"\"Test reading user ID from the default subject", "\"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server used in some tests", "TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService 
Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/>", "copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to be returned by an", "False, so synapse will see the requested uri as #", "\"kermit\"}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) if", "BASE_URL config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] = { \"config\": {\"display_name_template\":", "link self.assertEqual(len(p.links), 1) path, query = p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\")", "that the returned location matches the requested redirect URL path,", "6000) self.reactor.advance(retry_after_ms / 1000.0 + 1.0) params = { \"type\":", "oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\", 1) # it should redirect us", "header in channel.result.get(\"headers\", []): if header[0] == b\"Content-Type\": content_type_header_value =", "not yet valid (nbf)\", ) def test_login_no_sub(self) -> None: channel", "f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for f in channel.json_body[\"flows\"]], expected_flow_types", "False return self.hs @override_config( { \"rc_login\": { \"address\": {\"per_second\": 0.17,", "relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) -> None: \"\"\"If", "channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "@skip_unless(HAS_JWT, \"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets,", "# A valid audience. 
channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"})", "under the Apache License, Version 2.0 (the \"License\"); # you", "def _make_sso_redirect_request(self, idp_prov: Optional[str] = None) -> FakeChannel: \"\"\"Send a", "delete a device\"\"\" channel = self.make_request( b\"DELETE\", \"devices/\" + device_id,", "CASTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor,", "oidc_uri_query = oidc_uri.split(\"?\", 1) # it should redirect us to", "{\"display_name_template\": \"{{ user.displayname }}\"} } # whitelist this client URI", "[ login.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) ->", "channel.json_body[\"error\"], \"JWT validation failed: The token is not yet valid", "-> None: \"\"\"Test that users must provide a token when", "to sign tokens that shouldn't be accepted by synapse. #", "= { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"}, }", "sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS:", "opposed to set as the mock's return value) because the", "in some tests CAS_SERVER = \"https://fake.test\" # just enough to", "http://..., so using http in the public_baseurl stops Synapse trying", "isn't empty. 
self.assertTrue(len(channel.result[\"body\"]) > 0) # And that it contains", "None: self.register_user(\"mickey\", \"<PASSWORD>\") # create a device_id longer than 512", "ensure that the returned location matches the requested redirect URL", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field for JWT", "UI-Auth fail self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"}, channel.result, ) auth", "return ( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy>", "= self.base_config return config def jwt_encode(self, payload: Dict[str, Any], secret:", "confirmation page config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]} return config def create_resource_dict(self)", "devices, login, logout, register from synapse.rest.client.account import WhoamiRestServlet from synapse.rest.synapse.client", "server name: used to populate public_baseurl in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME", "first # # This is normally covered by the default", "should cause a 400\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", )", "-> None: # Create different users so we're sure not", "1.0) params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\":", "\"\"\"Tests that the SSO login flow serves a confirmation page", "<md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" % { \"SAML_SERVER\": SAML_SERVER,", "str, args: Any) -> bytes: \"\"\"Return an example response payload", "params = urllib.parse.parse_qs(query) 
self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\",", "1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) # Now, submit a username to", "channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers saml_uri = location_headers[0] saml_uri_path,", "set this manually as well \"account\": {\"per_second\": 10000, \"burst_count\": 10000},", "pick the default OIDC provider channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\"", "# Generated just like jwt_privatekey. bad_privatekey = \"\\n\".join( [ \"-----BEGIN", "\"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\", ] } } ) def test_cas_redirect_whitelisted(self)", "client tries to pick an unknown IdP, return a 404\"\"\"", "# Generated with `openssl rsa -in foo.key -pubout`, with the", "\"session id not found in map\", ) session = username_mapping_sessions[session_id]", "the client tries to pick an unknown IdP, return a", "secret, \"RS256\") if isinstance(result, bytes): return result.decode(\"ascii\") return result def", "address login ratelimiter from raising first # # This is", "channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"})", "channel.result) # parse the form to check it has fields", "service_uri_query = service_uri.split(\"?\", 1) service_uri_params = urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def", "\"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result) expected_flow_types = [ \"m.login.cas\", \"m.login.sso\", \"m.login.token\",", "that device, it will be a proper logout self._delete_device(access_token_2, \"kermit\",", 
"us to the auth page of the OIDC server self.assertEqual(oidc_uri_path,", "params) return channel def test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\", \"monkey\") channel", "channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)], redirect_url) @override_config({\"sso\": {\"client_whitelist\": [\"https://legit-site.com/\"]}}) def", "like jwt_privatekey. bad_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\",", "correctly encoded in the service param - ie, the #", "different device access_token_2 = self.login(\"kermit\", \"monkey\") # more requests with", "the template. channel = self.make_request(\"GET\", cas_ticket_url) # Because the user", "\"<PASSWORD>\") for i in range(0, 6): params = { \"type\":", "try: import jwt HAS_JWT = True except ImportError: HAS_JWT =", "to make requests with the access token channel = self.make_request(b\"GET\",", "(urllib.parse.quote(redirect_url)) ) # Get Synapse to call the fake CAS", "b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests for the", "(\"Content-Length\", str(len(content))), ], ) self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\")", "will redirect to cas_uri_params = urllib.parse.parse_qs(cas_uri_query) service_uri = cas_uri_params[\"service\"][0] _,", "jwt_secret) -> str: # PyJWT 2.0.0 changed the return type", "\"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\":", "self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\", \"name\": \"CAS\"}, {\"id\": \"saml\", \"name\":", ") def test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect should 
redirect to an", "channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "access_token = channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"] # we should now", "\"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation", "str, device_id: str ) -> None: \"\"\"Perform the UI-Auth to", "query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts)", "channel = self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token ) self.assertEqual(channel.code,", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "and serve the template. 
channel = self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302)", "/ 1000.0 + 1.0) params = { \"type\": \"m.login.password\", \"identifier\":", "# Because the user is deactivated they are served an", "from synapse.types import create_requester from synapse.util import Clock from tests", "self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\") # check that the redirectUrl is", "log out all of the user's sessions channel = self.make_request(b\"POST\",", "\"IDP1\"}, {\"id\": \"oidc\", \"name\": \"OIDC\"}, ], ) def test_multi_sso_redirect(self) ->", "\"address\": {\"per_second\": 10000, \"burst_count\": 10000}, \"failed_attempts\": {\"per_second\": 0.17, \"burst_count\": 5},", "link for each href returned_idps: List[str] = [] for link", "= urllib.parse.parse_qs(service_uri_query) self.assertEqual(service_uri_params[\"redirectUrl\"][0], TEST_CLIENT_REDIRECT_URL) def test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If SAML", "a confirmation page before redirecting a user to the redirect", "\"pick_idp\") params = urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\",", "link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\") params = urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0])", "self.jwt_algorithm) if isinstance(result, bytes): return result.decode(\"ascii\") return result def jwt_login(self,", "the user). self._test_redirect(redirect_url) # Deactivate the account. 
self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id,", "= self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "channel = self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"],", "serve the template. channel = self.make_request(\"GET\", cas_ticket_url) # Test that", "\"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An", "strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # finally, submit", "and serve the template. channel = self.make_request(\"GET\", cas_ticket_url) # Test", "None: config[\"jwt_config\"] = self.base_config return config def jwt_encode(self, payload: Dict[str,", "login flows we expect to appear in the list after", "some tests. 
It uses an http:// scheme because # FakeChannel.isSecure()", "whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self) -> None: self._test_redirect(\"https://example.com/_matrix/static/client/login\")", "channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"}) # that should serve", "result.decode(\"ascii\") return result def jwt_login(self, *args: Any) -> FakeChannel: params", "{**base_config, \"audiences\": [\"test-audience\"]}}) def test_login_aud(self) -> None: \"\"\"Test validating the", "with multiple SSO providers enabled\"\"\" servlets = [ login.register_servlets, ]", "login API, which gives us our # matrix access token,", "\"enabled\": True, \"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\", } return config def", "def test_cas_redirect_confirm(self) -> None: \"\"\"Tests that the SSO login flow", "LOGIN_URL, params) if i == 5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result) retry_after_ms", "b\"DELETE\", \"devices/\" + device_id, access_token=access_token ) self.assertEqual(channel.code, 401, channel.result) #", "assert location_headers saml_uri = location_headers[0] saml_uri_path, saml_uri_query = saml_uri.split(\"?\", 1)", "caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :] raise ValueError(\"No %s caveat in macaroon\"", "keys, but # good enough for tests!) 
jwt_privatekey = \"\\n\".join(", "be soft-logouted channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result)", "for JWT is missing\") # The JWTPubKeyTestCase is a complement", "= [ {\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets", "(not a secure way to generate real keys, but #", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If the client", "Copyright 2019-2021 The Matrix.org Foundation C.I.C. # # Licensed under", "Mock from urllib.parse import urlencode import pymacaroons from twisted.test.proto_helpers import", "# log in as normal params = { \"type\": \"m.login.password\",", "str, user_id: str, password: str, device_id: str ) -> None:", "... and should have set a cookie including the redirect", "= b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\" # a (valid) url with", "query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4] = urllib.parse.urlencode(query)", "self.make_request( \"POST\", path=picker_url, content=content, content_is_form=True, custom_headers=[ (\"Cookie\", \"username_mapping_session=\" + session_id),", "a soft-logout self.reactor.advance(3600) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401,", "oidc_uri.split(\"?\", 1) # it should redirect us to the auth", "urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) return self.make_request( \"GET\", endpoint, custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def", "logout this session channel = self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], 
b\"200\",", "def test_login_aud_no_config(self) -> None: \"\"\"Test providing an audience without requiring", "channel = self.jwt_login({\"sub\": \"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "redirect_url: str) -> None: \"\"\"Tests that the SSO login flow", "enough to tell pysaml2 where to redirect to SAML_SERVER =", "from synapse.rest.client import devices, login, logout, register from synapse.rest.client.account import", "token, mxid, and device id. login_token = params[2][1] chan =", "1) self.assertEqual(path, \"pick_idp\") params = urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"], [TEST_CLIENT_REDIRECT_URL]) returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps,", "\"username\" self.user_id = \"@%s:test\" % cas_user_id async def get_raw(uri: str,", "jwt_config has been defined (eg via @override_config), don't replace it.", "pubkey is used as the jwt_secret setting of synapse. 
Valid", "username picker flow.\"\"\" # do the start of the login", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self) -> None: self.register_user(\"mickey\", \"<PASSWORD>\") #", "deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets = [", "class html = channel.result[\"body\"].decode(\"utf-8\") p = TestHtmlParser() p.feed(html) p.close() #", "= location_headers[0] saml_uri_path, saml_uri_query = saml_uri.split(\"?\", 1) # it should", "secure way to generate real keys, but # good enough", "picker self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url", "-> None: self.register_user(\"kermit\", \"monkey\") # log in as normal access_token", "the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "multiple SSO providers enabled\"\"\" servlets = [ login.register_servlets, ] def", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0) params =", "isinstance(result, bytes): return result.decode(\"ascii\") return result def jwt_login(self, *args: Any)", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": { \"account\": {\"per_second\": 0.17,", "str(i)}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"],", "\"token\": self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL, params) return channel def", "serve the template. 
channel = self.make_request(\"GET\", cas_ticket_url) # Because the", "= self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"],", "lambda hs, http_server: WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self, reactor: MemoryReactor, clock:", "channel.json_body[\"flows\"]], expected_flow_types ) flows = {flow[\"type\"]: flow for flow in", "device_id, } # make a login request with the bad", "TEST_CLIENT_REDIRECT_URL, ) channel = self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"}) # that", "session_id)], ) self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers", "with the bad device_id channel = self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"),", "the `/proxyValidate` endpoint of a CAS server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20", "% (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server used in some tests CAS_SERVER", "cas_user_id ).encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw self.hs =", "\"frog\"}, \"notsecret\") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT", "-> None: self.register_user(\"kermit\", \"monkey\") # we shouldn't be able to", "in range(0, 6): self.register_user(\"kermit\" + str(i), \"monkey\") for i in", "Apache License, Version 2.0 (the \"License\"); # you may not", "\"24h\"}) def test_soft_logout(self) -> None: self.register_user(\"kermit\", \"monkey\") # we shouldn't", "redirect us to the login page of the cas server", "= 
location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will have url-encoded", "\"name\": \"OIDC\"}, ], ) def test_multi_sso_redirect(self) -> None: \"\"\"/login/sso/redirect should", "either express or implied. # See the License for the", "deleting the expired device # # we now log in", "# more requests with the expired token should still return", "b\"POST\", LOGIN_URL, params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_no_token(self)", "result def jwt_login(self, *args: Any) -> FakeChannel: params = {\"type\":", "channel = self.jwt_login({\"sub\": \"frog\", \"nbf\": now + 3600}) self.assertEqual(channel.result[\"code\"], b\"403\",", "config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]} return config def create_resource_dict(self) -> Dict[str,", "return value) because the corresponding Synapse code awaits on it.", "signed by this and validated using the pubkey. It is", "for the username picker flow of SSO login\"\"\" servlets =", "UI-Auth to delete a device\"\"\" channel = self.make_request( b\"DELETE\", \"devices/\"", ") def test_login_iss_no_config(self) -> None: \"\"\"Test providing an issuer claim", "\"password\": \"<PASSWORD>\", \"device_id\": device_id, } # make a login request", "config = super().default_config() config[\"jwt_config\"] = { \"enabled\": True, \"secret\": self.jwt_pubkey,", "import HomeServer from synapse.types import create_requester from synapse.util import Clock", "in the list after the normal # ones ADDITIONAL_LOGIN_FLOWS =", "user_id}, \"user\": user_id, \"password\": password, \"session\": channel.json_body[\"session\"], } channel =", "synapse.server import HomeServer from synapse.types import create_requester from synapse.util import", "to the login page of the SAML server self.assertEqual(saml_uri_path, SAML_SERVER)", "# it should redirect us to the login page of", "self.get_success( 
self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id) ) ) # Request the", "channel = self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid(self) ->", "synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret = \"secret\" jwt_algorithm = \"HS256\" base_config", "key placed in foo.key (jwt_privatekey). jwt_pubkey = \"\\n\".join( [ \"-----BEGIN", "serve a redirect # to the completion page content =", "here, we need to set this manually as well \"account\":", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" ) def test_login_default_sub(self)", "self.user_id, False, create_requester(self.user_id) ) ) # Request the CAS ticket.", "audience. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "\"enabled\": True, \"server_url\": CAS_SERVER, \"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] = {", "self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) -> None:", "403) self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase):", "Signature verification failed\", ) AS_USER = \"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets", "RSA PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", 
\"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\",", "[], }, ) self.another_service = ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\",", "cas_uri_path, cas_uri_query = cas_uri.split(\"?\", 1) # it should redirect us", "service_uri = cas_uri_params[\"service\"][0] _, service_uri_query = service_uri.split(\"?\", 1) service_uri_params =", "def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\", \"<PASSWORD>\") for i in range(0,", "a deactivated account should error.\"\"\" redirect_url = \"https://legit-site.com/\" # First", "self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) # Now, submit a username to the", "self.register_user(\"kermit\", \"monkey\") for i in range(0, 6): params = {", "= { \"type\": \"m.login.password\", \"user\": \"mickey\", \"password\": \"<PASSWORD>\", \"device_id\": device_id,", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed: Token is missing", "Synapse code awaits on it. \"\"\" return ( \"\"\" <cas:serviceResponse", "= self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200, channel.result) # parse the form", "\"-----END PUBLIC KEY-----\", ] ) # This key is used", ") def test_POST_ratelimiting_per_account(self) -> None: self.register_user(\"kermit\", \"monkey\") for i in", "an issuer. 
channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "+ device_id, access_token=access_token, content={\"auth\": auth}, ) self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\":", "302 to the client redirectUrl chan = self.make_request( \"GET\", path=location_headers[0],", "used to carry the client redirect url saml_uri_params = urllib.parse.parse_qs(saml_uri_query)", "# first hit the redirect url, which should redirect to", "= { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, }", "login token out of the returned redirect uri login_token =", "# PyJWT 2.0.0 changed the return type of jwt.encode from", "template. channel = self.make_request(\"GET\", cas_ticket_url) # Because the user is", "synapse.rest.client import devices, login, logout, register from synapse.rest.client.account import WhoamiRestServlet", "= True self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms = [] self.hs.config.captcha.enable_registration_captcha =", "server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to be returned by", "audience claim.\"\"\" # A valid audience. 
channel = self.jwt_login({\"sub\": \"kermit\",", "wrong as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE,", "\"user\": \"mickey\", \"password\": \"<PASSWORD>\", \"device_id\": device_id, } # make a", "key configured in synapse as \"jwt_secret\", and tokens # signed", "None: \"\"\"Test that the appservice bot can use /login\"\"\" self.register_appservice_user(AS_USER,", "super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG) config[\"oidc_config\"][\"user_mapping_provider\"] =", "twisted.test.proto_helpers import MemoryReactor from twisted.web.resource import Resource import synapse.rest.admin from", "params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\", \"\"), ('q\" =+\"',", "\"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}}) def test_login_iss(self)", "returned by an async function (as opposed to set as", "the service param - ie, the # place that CAS", "= \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\" <md:EntityDescriptor xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\"> <md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService", "= { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\": \"kermit\"}, \"password\":", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "\"@kermit:test\") # An invalid audience. 
channel = self.jwt_login({\"sub\": \"kermit\", \"aud\":", "EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # fish the login token out of", "params) self.assertEqual(channel.code, 200, channel.result) access_token = channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"]", "mock's return value) because the corresponding Synapse code awaits on", ") return self.hs def prepare(self, reactor: MemoryReactor, clock: Clock, hs:", "TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server import FakeChannel from tests.test_utils.html_parsers import TestHtmlParser", "# (possibly experimental) login flows we expect to appear in", "\"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\": {\"localpart_template\": \"{{ user.sub }}\"} },", "self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token ) self.assertEqual(channel.code, 401, channel.result)", "self.another_service = ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [", "self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": { # Prevent the", "to a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"}) def test_cas_redirect_login_fallback(self) ->", "complement to JWTTestCase where we instead use # RSS256, with", "\"\"\"Logging in as a deactivated account should error.\"\"\" redirect_url =", "\"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.hs.get_datastores().main.services_cache.append(self.service)", "= key + \" = \" for caveat in macaroon.caveats:", "Now, submit a username to the username picker, which should", "-> Dict[str, Resource]: d = super().create_resource_dict() 
d.update(build_synapse_client_resource_tree(self.hs)) return d def", "self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code, 302) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][:", "return channel def test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\", \"monkey\") channel =", "access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_user_bot(self) -> None: \"\"\"Test", "{ \"idp_id\": \"idp1\", \"idp_name\": \"IDP1\", \"discover\": False, \"issuer\": \"https://issuer1\", \"client_id\":", "for flow in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\", \"name\":", "raise ValueError(\"No %s caveat in macaroon\" % (key,)) class CASTestCase(unittest.HomeserverTestCase):", "-> None: \"\"\"Test providing an audience without requiring it in", "* 512 body = { \"type\": \"m.login.password\", \"user\": \"mickey\", \"password\":", "create the user). self._test_redirect(redirect_url) # Deactivate the account. self.get_success( self.deactivate_account_handler.deactivate_account(", "missing\") # The JWTPubKeyTestCase is a complement to JWTTestCase where", "import MemoryReactor from twisted.web.resource import Resource import synapse.rest.admin from synapse.appservice", "session # looks ok. username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id, username_mapping_sessions,", "Not providing an issuer. 
channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\",", "completion page content = urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan = self.make_request( \"POST\",", "to the SAML server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" +", "to carry the client redirect url saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param", "the login page of the cas server self.assertEqual(cas_uri_path, CAS_SERVER +", "from bytes to str. result: Union[bytes, str] = jwt.encode(payload, secret,", "channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri = location_headers[0] oidc_uri_path,", "device_id = channel.json_body[\"device_id\"] # we should now be able to", "# Get Synapse to call the fake CAS and serve", "self.hs def test_login_appservice_user(self) -> None: \"\"\"Test that an appservice user", "\"m.login.password\", \"user\": \"mickey\", \"password\": \"<PASSWORD>\", \"device_id\": device_id, } # make", "build_synapse_client_resource_tree from synapse.server import HomeServer from synapse.types import create_requester from", "= pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, ) channel = self.helper.complete_oidc_auth(oidc_uri,", "test_login_appservice_user_bot(self) -> None: \"\"\"Test that the appservice bot can use", "params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": self.service.sender},", "def test_login_default_sub(self) -> None: \"\"\"Test reading user ID from the", "b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_wrong_as(self)", "username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions 
self.assertIn( session_id, username_mapping_sessions, \"session id not found", "WhoamiRestServlet from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer from", "self.register_user(\"kermit\", \"monkey\") channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "instead use # RSS256, with a public key configured in", "\"{{ user.sub }}\"} }, } ] return config def create_resource_dict(self)", "self.deactivate_account_handler = hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) -> None: \"\"\"Tests that the", "error.\"\"\" redirect_url = \"https://legit-site.com/\" # First login (to create the", "def jwt_login(self, *args: Any) -> FakeChannel: params = {\"type\": \"org.matrix.login.jwt\",", "= self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def", "test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If SAML is chosen, should redirect to", "-> None: \"\"\"Logging in as a deactivated account should error.\"\"\"", "b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: The", "> 0) # And that it contains our redirect link", ") self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # fish the login token", "password and SSO flows\"\"\" channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200,", "the form to check it has fields assumed elsewhere in", "return a soft-logout self.reactor.advance(3600) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code,", "\"cas\", \"name\": \"CAS\"}, {\"id\": \"saml\", \"name\": 
\"SAML\"}, {\"id\": \"oidc-idp1\", \"name\":", "= \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server used in some", "= \" for caveat in macaroon.caveats: if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix)", "a username_mapping_session cookie cookies: Dict[str, str] = {} channel.extract_cookies(cookies) self.assertIn(\"username_mapping_session\",", "channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\", \"monkey\") #", "to the CAS server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" +", "\"account\": {\"per_second\": 10000, \"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_address(self)", "device, it will be a proper logout self._delete_device(access_token_2, \"kermit\", \"monkey\",", "# Prevent the address login ratelimiter from raising first #", "to str. result: Union[str, bytes] = jwt.encode(payload, secret, self.jwt_algorithm) if", "\"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, ) # test that the login", "SAML is chosen, should redirect to the SAML server\"\"\" channel", ").encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw self.hs = self.setup_test_homeserver(", "\"identifier\": {\"type\": \"m.id.user\", \"user\": self.service.sender}, } channel = self.make_request( b\"POST\",", "# If jwt_config has been defined (eg via @override_config), don't", "should serve a redirect # to the completion page content", "\"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies>", "Resource import 
synapse.rest.admin from synapse.appservice import ApplicationService from synapse.rest.client import", "= self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\")", "import jwt HAS_JWT = True except ImportError: HAS_JWT = False", "pysaml2 where to redirect to SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA =", "default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"public_baseurl\"] = BASE_URL", "\"name\": \"IDP1\"}, {\"id\": \"oidc\", \"name\": \"OIDC\"}, ], ) def test_multi_sso_redirect(self)", "= self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token ) self.assertEqual(channel.code, 401,", "test_cas_redirect_confirm(self) -> None: \"\"\"Tests that the SSO login flow serves", "hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\", \"exclusive\": False} ],", "cannot login with the as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params =", "to the login API, which gives us our # matrix", "None: \"\"\"Test that non-as users cannot login with the as", "us to the login page of the cas server self.assertEqual(cas_uri_path,", "= [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, logout.register_servlets, devices.register_servlets, lambda hs, http_server: WhoamiRestServlet(hs).register(http_server),", "validating the audience claim.\"\"\" # A valid audience. 
channel =", "device # # we now log in as a different", "\"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL ) # that should redirect to the", "Optional[str] = None) -> FakeChannel: \"\"\"Send a request to /_matrix/client/r0/login/sso/redirect", "PRIVATE KEY-----\", ] ) def default_config(self) -> Dict[str, Any]: config", "account login ratelimiter from raising first # # This is", "] ) def default_config(self) -> Dict[str, Any]: config = super().default_config()", "jwt_algorithm, } def default_config(self) -> Dict[str, Any]: config = super().default_config()", "{ \"sp_config\": { \"metadata\": {\"inline\": [TEST_SAML_METADATA]}, # use the XMLSecurity", "\"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA PRIVATE KEY-----\", ] ) def default_config(self) ->", "self.reactor.advance(24 * 3600) # ... and we should be soft-logouted", "redirect to our idp picker channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302,", "b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self) -> None: self.register_user(\"kermit\", \"monkey\")", "\"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] #", "use this file except in compliance with the License. 
#", "def test_login_jwt_expired(self) -> None: channel = self.jwt_login({\"sub\": \"frog\", \"exp\": 864000})", "sessions channel = self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def", "href returned_idps: List[str] = [] for link in p.links: path,", "= urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts) # Get Synapse to call", "is None: config[\"jwt_config\"] = self.base_config return config def jwt_encode(self, payload:", "channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) -> None: self.register_user(\"kermit\",", "False # synapse server name: used to populate public_baseurl in", ") def test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests that the SSO login", "server self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\") # check that the redirectUrl", "import HomeserverTestCase, override_config, skip_unless try: import jwt HAS_JWT = True", "used to populate public_baseurl in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\"", "TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\", \"\"), ('q\" =+\"', '\"fö&=o\"')] #", "cas server self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\") # check that the", "self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id, username_mapping_sessions, \"session id not found in map\",", "the correct error code self.assertEqual(channel.code, 400) self.assertEqual(channel.json_body[\"errcode\"], \"M_INVALID_PARAM\") @skip_unless(has_saml2 and", "login flow channel = self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL", "= location_headers[0] oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\", 1) # it should", "requiring it in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", 
\"aud\":", "async function (as opposed to set as the mock's return", "providing an issuer claim without requiring it in the configuration.\"\"\"", "uri login_token = params[2][1] # finally, submit the matrix login", "(nbf)\", ) def test_login_no_sub(self) -> None: channel = self.jwt_login({\"username\": \"root\"})", "more requests with the expired token should still return a", "else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) # Since we're ratelimiting at 1", "} ) def test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests that the SSO", "1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0) params = {", "parse the form to check it has fields assumed elsewhere", "b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_appservice_wrong_user(self)", "(jwt_privatekey). jwt_pubkey = \"\\n\".join( [ \"-----BEGIN PUBLIC KEY-----\", \"<KEY>\", \"<KEY>", "with the as token\"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\":", "} LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\" # a (valid)", "has_saml2 from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server import FakeChannel", "= TestHtmlParser() p.feed(html) p.close() # there should be a link", "should now be able to make requests with the access", "the \"iss\" claim', ) def test_login_iss_no_config(self) -> None: \"\"\"Test providing", "params, access_token=self.another_service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_no_token(self) -> None:", "location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers cas_uri = location_headers[0] cas_uri_path, cas_uri_query", "# it will have url-encoded the params properly, so we'll", "the happy path of a username picker 
flow.\"\"\" # do", "which sets these values to 10000, but as we're overriding", "+ device_id, access_token=access_token ) self.assertEqual(channel.code, 401, channel.result) # check it's", "the redirect URL. \"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\"", "validation failed: Invalid issuer\" ) # Not providing an issuer.", "[], ApplicationService.NS_ALIASES: [], }, ) self.another_service = ApplicationService( id=\"another__identifier\", token=\"another_token\",", "Test that the body isn't empty. self.assertTrue(len(channel.result[\"body\"]) > 0) #", "\"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL, params) return channel", "self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid(self)", "validation failed: Invalid audience\" ) def test_login_default_sub(self) -> None: \"\"\"Test", "just enough to tell pysaml2 where to redirect to SAML_SERVER", "+ \"&idp=cas\", shorthand=False, ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\")", "saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) -> None: \"\"\"If OIDC is", "{\"inline\": [TEST_SAML_METADATA]}, # use the XMLSecurity backend to avoid relying", "\"monkey\") # more requests with the expired token should still", "macaroon.caveats: if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :] raise ValueError(\"No %s caveat", "the RelayState is used to carry the client redirect url", "params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) @skip_unless(HAS_OIDC, \"requires OIDC\") class UsernamePickerTestCase(HomeserverTestCase): \"\"\"Tests", "KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", 
\"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\", \"KydN6cRLvphNQ9c/vBTdlzWxzcSxREpguC7F1J1m\", \"-----END RSA", "channel = self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def", "self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"},", "auth page of the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) def _make_sso_redirect_request(self,", "IdP should cause a 400\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\",", "get_raw(uri: str, args: Any) -> bytes: \"\"\"Return an example response", "(as opposed to set as the mock's return value) because", "cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) # Get Synapse", "is a complement to JWTTestCase where we instead use #", "in compliance with the License. # You may obtain a", "None: \"\"\"Tests that the SSO login flow serves a confirmation", "-> None: \"\"\"GET /login should return password and SSO flows\"\"\"", "redirecting a user to the redirect URL. \"\"\" base_url =", "OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ... and should have set", "subject claim.\"\"\" channel = self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "HTML. 
self.assertEqual(channel.code, 200, channel.result) content_type_header_value = \"\" for header in", "software # distributed under the License is distributed on an", "the UI-Auth to delete a device\"\"\" channel = self.make_request( b\"DELETE\",", "} ] return config def create_resource_dict(self) -> Dict[str, Resource]: d", "missing the \"iss\" claim', ) def test_login_iss_no_config(self) -> None: \"\"\"Test", "channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"test-audience\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "# Deactivate the account. self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id) )", ") # Generated with `openssl rsa -in foo.key -pubout`, with", "MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.service =", "def test_login_appservice_user_bot(self) -> None: \"\"\"Test that the appservice bot can", "properly, so we'll have to parse them params = urllib.parse.parse_qsl(", "test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If the client pick a known IdP,", "issuer claim without requiring it in the configuration.\"\"\" channel =", "\"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.another_service", "config def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_secret)", "# a (valid) url with some annoying characters in. 
%3D", "make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver()", "512 characters device_id = \"yolo\" * 512 body = {", "-> None: self.register_user(\"kermit\", \"<PASSWORD>\") for i in range(0, 6): params", "skip_unless try: import jwt HAS_JWT = True except ImportError: HAS_JWT", "dict here, we need to set this manually as well", "clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration = True", "hs: HomeServer) -> None: self.deactivate_account_handler = hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) ->", "\"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid issuer\" ) #", "happy path of a username picker flow.\"\"\" # do the", "https://.... BASE_URL = \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS server used", "lower # than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0", "\"\"), ('q\" =+\"', '\"fö&=o\"')] # (possibly experimental) login flows we", "def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer: self.base_url =", "login.register_servlets, ] # This key's pubkey is used as the", "# Since we're ratelimiting at 1 request/min, retry_after_ms should be", "flow.\"\"\" # do the start of the login flow channel", "login request with the bad device_id channel = self.make_request( \"POST\",", "= self.make_request( \"POST\", \"/login\", content={\"type\": \"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code,", "method \"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\":", "{flow[\"type\"]: flow for flow in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\":", ") self.hs.get_datastores().main.services_cache.append(self.service) 
self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def test_login_appservice_user(self) -> None: \"\"\"Test", "ApplicationService from synapse.rest.client import devices, login, logout, register from synapse.rest.client.account", "=, %26 is &, %2B is + TEST_CLIENT_REDIRECT_URL = 'https://x?<ab", "expect to appear in the list after the normal #", "self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def test_login_appservice_user(self) -> None: \"\"\"Test that", "= self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, ) return self.hs def prepare(self, reactor:", "jwt_privatekey) -> str: # PyJWT 2.0.0 changed the return type", "able to make requests without an access token channel =", "512 body = { \"type\": \"m.login.password\", \"user\": \"mickey\", \"password\": \"<PASSWORD>\",", "out all of the user's sessions channel = self.make_request(b\"POST\", \"/logout/all\",", "\"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\", \"R6HRZcLExZuUrWI+5ZSP7TaZ1uwJzGFspDrunqaVoPobndw/8VsP8HFyKtceC7vY\", \"uQIhAPdYInDDSJ8rFKGiy3Ajv5KWISBicjevWHF9dbotmNO9AiEAxrdRJVU+EI9I\", \"eB4qRZpY6n4pnwyP0p8f/A3NBaQPG+cCIFlj08aW/PbxNdqYoBdeBA0xDrXKfmbb\", \"iwYxBkwL0JCtAiBYmsi94sJn09u2Y4zpuCbJeDPKzWkbuwQh+W1fhIWQJQIhAKR0\",", "failed: Token is missing the \"aud\" claim', ) def test_login_aud_no_config(self)", "Signature verification failed\", ) def test_login_jwt_expired(self) -> None: channel =", "flows\"\"\" channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result) expected_flow_types =", "# ... 
and we should be soft-logouted channel = self.make_request(b\"GET\",", "channel = self.make_request( b\"DELETE\", \"devices/\" + device_id, access_token=access_token, content={\"auth\": auth},", "custom_headers=[(\"Host\", SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) ->", "None: self.register_user(\"kermit\", \"monkey\") channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "= self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid(self) -> None:", "] jwt_secret = \"secret\" jwt_algorithm = \"HS256\" base_config = {", "\"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", ) self.assertEqual(channel.code, 302, channel.result)", "cookies, {\"sub\": \"user1\"}) # that should serve a confirmation page", "we expect to appear in the list after the normal", "401, channel.result) # check it's a UI-Auth fail self.assertEqual( set(channel.json_body.keys()),", ") def test_login_default_sub(self) -> None: \"\"\"Test reading user ID from", "channel.json_body[\"device_id\"] # we should now be able to make requests", "[\"https://legit-site.com/\"]}}) def test_deactivated_user(self) -> None: \"\"\"Logging in as a deactivated", "import create_requester from synapse.util import Clock from tests import unittest", "-> None: \"\"\"If CAS is chosen, should redirect to the", "self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # ensure", "super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"] = { \"enabled\": True, \"server_url\":", "our # matrix access token, mxid, and device id. 
login_token", "config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER, } cas_user_id =", "manually as well \"account\": {\"per_second\": 10000, \"burst_count\": 10000}, } }", ") def test_login_jwt_expired(self) -> None: channel = self.jwt_login({\"sub\": \"frog\", \"exp\":", "config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"oidc_config\"] = {} config[\"oidc_config\"].update(TEST_OIDC_CONFIG)", "def test_login_appservice_user(self) -> None: \"\"\"Test that an appservice user can", "claim.\"\"\" channel = self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\")", "validated using the pubkey. It is generated # with `openssl", "[\"https://x\"]} return config def create_resource_dict(self) -> Dict[str, Resource]: d =", "channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser() p.feed(channel.text_body) p.close() #", "b\"429\", channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) #", "by the default test homeserver config # which sets these", "requests with the expired token should still return a soft-logout", "404\"\"\" channel = self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def", "assert location_headers uri = location_headers[0] # hitting that picker should", "self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) # Since we're ratelimiting at 1 request/min,", "a redirect # to the completion page content = urlencode({b\"username\":", "self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p = TestHtmlParser() p.feed(channel.text_body) p.close() # ... 
which should", "matches the requested redirect URL path, query = location_headers[0].split(\"?\", 1)", "def test_login_jwt_valid_unregistered(self) -> None: channel = self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\",", "self.hs.config.registration.auto_join_rooms = [] self.hs.config.captcha.enable_registration_captcha = False return self.hs @override_config( {", "without an access token channel = self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\",", "self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\",", "<cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies> <cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse> \"\"\" % cas_user_id", "\"JWT validation failed: Invalid issuer\" ) # Not providing an", "normal # ones ADDITIONAL_LOGIN_FLOWS = [ {\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"},", "= self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def test_login_jwt_invalid_signature(self)", "LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_soft_logout(self) ->", "self.assertEqual(params[2][0], \"loginToken\") # finally, submit the matrix login token to", "login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, } channel = self.make_request(", "# This key's pubkey is used as the jwt_secret setting", "-> HomeServer: self.hs = self.setup_test_homeserver() self.service = ApplicationService( 
id=\"unique_identifier\", token=\"some_token\",", "saml_uri = location_headers[0] saml_uri_path, saml_uri_query = saml_uri.split(\"?\", 1) # it", "prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None: self.deactivate_account_handler", "should redirect to the CAS server\"\"\" channel = self.make_request( \"GET\",", "{\"flows\", \"params\", \"session\"}, channel.result, ) auth = { \"type\": \"m.login.password\",", "def default_config(self) -> Dict[str, Any]: config = super().default_config() config[\"jwt_config\"] =", "with the License. # You may obtain a copy of", "self._test_redirect(redirect_url) # Deactivate the account. self.get_success( self.deactivate_account_handler.deactivate_account( self.user_id, False, create_requester(self.user_id)", "tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server import FakeChannel from tests.test_utils.html_parsers", "to set as the mock's return value) because the corresponding", "# that should serve a confirmation page self.assertEqual(channel.code, 200, channel.result)", "# ... 
but if we delete that device, it will", "\"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", ) self.assertEqual(channel.code, 302, channel.result) location_headers", "\"https://x\") # it will have url-encoded the params properly, so", "validation failed: Invalid audience\" ) # Not providing an audience.", "None: self.deactivate_account_handler = hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) -> None: \"\"\"Tests that", "missing the \"aud\" claim', ) def test_login_aud_no_config(self) -> None: \"\"\"Test", "= ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\":", "username picker, which should serve a redirect # to the", "-> None: \"\"\"If the client tries to pick an unknown", "864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation", "flow for flow in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\",", "a CAS server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to be", ") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers oidc_uri", "params = {\"type\": \"org.matrix.login.jwt\", \"token\": self.jwt_encode(*args)} channel = self.make_request(b\"POST\", LOGIN_URL,", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log in as normal params =", "self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\": { \"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\",", 
"login\"\"\" servlets = [login.register_servlets] def default_config(self) -> Dict[str, Any]: config", ") flows = {flow[\"type\"]: flow for flow in channel.json_body[\"flows\"]} self.assertCountEqual(", "retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) # Since we're", "call to the `/proxyValidate` endpoint of a CAS server, copied", "sure not to be bothered by the per-user # ratelimiter.", "it in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"})", "self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\") @override_config({\"jwt_config\": {**base_config, \"issuer\": \"test-issuer\"}}) def test_login_iss(self) ->", "caveat in macaroon.caveats: if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :] raise ValueError(\"No", "return caveat.caveat_id[len(prefix) :] raise ValueError(\"No %s caveat in macaroon\" %", "CAS and serve the template. 
channel = self.make_request(\"GET\", cas_ticket_url) self.assertEqual(channel.code,", "request to the completion page, which should 302 to the", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # # test behaviour after", "[ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret = \"secret\" jwt_algorithm = \"HS256\"", "it should redirect us to the auth page of the", "soft-logouted channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "auth}, ) self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\": \"24h\"}) def test_session_can_hard_logout_after_being_soft_logged_out(self) ->", "foo.key -pubout`, with the the above # private key placed", "twisted.web.resource import Resource import synapse.rest.admin from synapse.appservice import ApplicationService from", "id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as2_user.*\", \"exclusive\":", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard", "express or implied. 
# See the License for the specific", "= self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\":", "{\"regex\": r\"@as_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], },", "except in compliance with the License. # You may obtain", "CAS_SERVER = \"https://fake.test\" # just enough to tell pysaml2 where", "def test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If the client pick a known", "= self.make_request(\"GET\", cas_ticket_url) # Test that the response is HTML.", "test_POST_ratelimiting_per_address(self) -> None: # Create different users so we're sure", "-> None: \"\"\"Test that an appservice user can use /login\"\"\"", "so we'll have to parse them params = urllib.parse.parse_qsl( query,", "= 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS", "<cas:proxy>https://proxy2/pgtUrl</cas:proxy> <cas:proxy>https://proxy1/pgtUrl</cas:proxy> </cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse> \"\"\" % cas_user_id ).encode(\"utf-8\") mocked_http_client", "verification failed\", ) def test_login_jwt_expired(self) -> None: channel = self.jwt_login({\"sub\":", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: The token is not yet", "should redirect us to the login page of the SAML", "HomeServer: self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" config = self.default_config()", "channel.result) # check it's a UI-Auth fail self.assertEqual( 
set(channel.json_body.keys()), {\"flows\",", "replace it. if config.get(\"jwt_config\") is None: config[\"jwt_config\"] = self.base_config return", "Dict[str, Any]: config = super().default_config() # If jwt_config has been", "TEST_OIDC_AUTH_ENDPOINT) # ... and should have set a cookie including", "channel.result) def test_login_appservice_no_token(self) -> None: \"\"\"Test that users must provide", "token out of the returned redirect uri login_token = params[2][1]", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "* 60 * 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) # Now, submit", "from synapse.rest.synapse.client import build_synapse_client_resource_tree from synapse.server import HomeServer from synapse.types", "self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # finally, submit the matrix login", "it. if config.get(\"jwt_config\") is None: config[\"jwt_config\"] = self.base_config return config", "LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_wrong_as(self) ->", "link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\")) @override_config( { \"sso\": { \"client_whitelist\": [ \"https://legit-site.com/\",", "= dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url}) query.update({\"ticket\": \"ticket\"}) url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url", "\"JWT validation failed: The token is not yet valid (nbf)\",", "{ \"SAML_SERVER\": SAML_SERVER, } LOGIN_URL = b\"/_matrix/client/r0/login\" TEST_URL = b\"/_matrix/client/r0/account/whoami\"", "{ \"enabled\": True, \"server_url\": CAS_SERVER, } cas_user_id = \"username\" self.user_id", "create_resource_dict(self) -> Dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) 
return d", "it rather than # serving a confirmation page config[\"sso\"] =", "synapse server name: used to populate public_baseurl in some tests", "hit the redirect url, which should redirect to our idp", "audience. channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "to make requests without an access token channel = self.make_request(b\"GET\",", "self.hs def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) ->", "p.links: path, query = link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\") params =", "the login token out of the returned redirect uri login_token", "CONDITIONS OF ANY KIND, either express or implied. # See", "fake CAS and serve the template. channel = self.make_request(\"GET\", cas_ticket_url)", "login.register_servlets, ] jwt_secret = \"secret\" jwt_algorithm = \"HS256\" base_config =", "str] = {} for h in cookie_headers: key, value =", "failed\", ) AS_USER = \"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [", "return a 404\"\"\" channel = self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code, 404, channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "{ \"enabled\": True, \"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\", } return config", "if caveat.caveat_id.startswith(prefix): return caveat.caveat_id[len(prefix) :] raise ValueError(\"No %s caveat in", "ones ADDITIONAL_LOGIN_FLOWS = [ {\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class", "flow in channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\", \"name\": \"CAS\"},", "def test_session_can_hard_logout_after_being_soft_logged_out(self) -> None: self.register_user(\"kermit\", \"monkey\") # log in as", "by an async function (as opposed to set as the", "\"\"\"If the client 
pick a known IdP, redirect to it\"\"\"", ") # Request the CAS ticket. cas_ticket_url = ( \"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\"", "content={\"type\": \"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@bobby:test\")", "1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0 + 1.0) params", "hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [ {\"regex\": r\"@as_user.*\", \"exclusive\": False} ],", "channel = self.make_request(b\"GET\", TEST_URL) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") #", "\"address\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the account login", "\"burst_count\": 5}, } } ) def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\",", "self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) def test_login_with_overly_long_device_id_fails(self) -> None:", "cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies: Dict[str, str] = {}", "\"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", ) self.assertEqual(channel.code, 302, channel.result) location_headers", "the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\",", "with a public key configured in synapse as \"jwt_secret\", and", "\"\"\"Test that users must provide a token when using the", "provider \"\"\" endpoint = \"/_matrix/client/r0/login/sso/redirect\" if idp_prov is not None:", "@skip_unless(HAS_JWT, \"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): 
servlets = [ login.register_servlets, ]", "JWTTestCase where we instead use # RSS256, with a public", "a link for each href returned_idps: List[str] = [] for", "HAS_JWT = False # synapse server name: used to populate", "example response payload from a call to the `/proxyValidate` endpoint", "access token, mxid, and device id. chan = self.make_request( \"POST\",", "\"org.matrix.login.jwt\"} channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "[ \"m.login.cas\", \"m.login.sso\", \"m.login.token\", \"m.login.password\", ] + [f[\"type\"] for f", "changed the return type of jwt.encode from bytes to str.", "serves a redirect to a whitelisted url\"\"\" self._test_redirect(\"https://legit-site.com/\") @override_config({\"public_baseurl\": \"https://example.com\"})", "AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, register.register_servlets, ] def make_homeserver(self, reactor:", "as a deactivated account should error.\"\"\" redirect_url = \"https://legit-site.com/\" #", "self.assertEqual(chan.code, 200, chan.result) self.assertEqual(chan.json_body[\"user_id\"], \"@user1:test\") def test_multi_sso_redirect_to_unknown(self) -> None: \"\"\"An", "None: \"\"\"Perform the UI-Auth to delete a device\"\"\" channel =", "&, %2B is + TEST_CLIENT_REDIRECT_URL = 'https://x?<ab c>&q\"+%3D%2B\"=\"fö%26=o\"' # the", "query params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\", \"\"), ('q\"", "signed by the private key. 
@skip_unless(HAS_JWT, \"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase):", "c>&q\"+%3D%2B\"=\"fö%26=o\"' # the query params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab", "\"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device( self, access_token: str, user_id: str,", "the user's sessions channel = self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\",", "self.jwt_login({\"sub\": \"frog\", \"nbf\": now + 3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: The token is", "and tokens # signed by the private key. @skip_unless(HAS_JWT, \"requires", "is missing\") # The JWTPubKeyTestCase is a complement to JWTTestCase", "self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Invalid JWT\")", "\"https://legit-site.com/\" # First login (to create the user). self._test_redirect(redirect_url) #", "</cas:proxies> </cas:authenticationSuccess> </cas:serviceResponse> \"\"\" % cas_user_id ).encode(\"utf-8\") mocked_http_client = Mock(spec=[\"get_raw\"])", "\"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) # the expiry time should", "device id. 
login_token = params[2][1] chan = self.make_request( \"POST\", \"/login\",", "= [ \"m.login.cas\", \"m.login.sso\", \"m.login.token\", \"m.login.password\", ] + [f[\"type\"] for", "create a device_id longer than 512 characters device_id = \"yolo\"", "macaroon\" % (key,)) class CASTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ]", "\"kermit\", \"monkey\", device_id) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401,", "\"rc_login\": { \"account\": {\"per_second\": 0.17, \"burst_count\": 5}, # Prevent the", "200, channel.result) access_token = channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"] # we", "\"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code, 200, channel.result)", "that the body isn't empty. self.assertTrue(len(channel.result[\"body\"]) > 0) # And", "channel.json_body[\"error\"], \"JWT validation failed: Signature verification failed\", ) AS_USER =", "TEST_OIDC_CONFIG from tests.server import FakeChannel from tests.test_utils.html_parsers import TestHtmlParser from", "the start of the login flow channel = self.helper.auth_via_oidc( {\"sub\":", "{ \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": self.service.sender}, } channel", "= chan.headers.getRawHeaders(\"Location\") assert location_headers # send a request to the", "= self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", ) self.assertEqual(channel.code,", "sso handler a bit to check that the username mapping", "token should still return a soft-logout self.reactor.advance(3600) channel = self.make_request(b\"GET\",", "-> None: channel = self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", 
"args: Any) -> bytes: \"\"\"Return an example response payload from", "synapse.util import Clock from tests import unittest from tests.handlers.test_oidc import", "= \"/_matrix/client/r0/login/sso/redirect\" if idp_prov is not None: endpoint += \"/\"", "is normally covered by the default test homeserver config #", "enabled\"\"\" servlets = [ login.register_servlets, ] def default_config(self) -> Dict[str,", "keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS) self.assertEqual(params[2][0], \"loginToken\") # fish", "configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field for JWT is missing\")", "[], ApplicationService.NS_ALIASES: [], }, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs def", "audience without requiring it in the configuration.\"\"\" channel = self.jwt_login({\"sub\":", "unittest from tests.handlers.test_oidc import HAS_OIDC from tests.handlers.test_saml import has_saml2 from", "\"mickey\", \"password\": \"<PASSWORD>\", \"device_id\": device_id, } # make a login", "\"subject_claim\": \"username\"}}) def test_login_custom_sub(self) -> None: \"\"\"Test reading user ID", "[f[\"type\"] for f in channel.json_body[\"flows\"]], expected_flow_types ) flows = {flow[\"type\"]:", "# do the start of the login flow channel =", "</md:EntityDescriptor> \"\"\" % { \"SAML_SERVER\": SAML_SERVER, } LOGIN_URL = b\"/_matrix/client/r0/login\"", "clock: Clock, hs: HomeServer) -> None: self.deactivate_account_handler = hs.get_deactivate_account_handler() def", "username_mapping_sessions, \"session id not found in map\", ) session =", "\"POST\", 
\"/login\", content={\"type\": \"m.login.token\", \"token\": login_token}, ) self.assertEqual(chan.code, 200, chan.result)", "# # test behaviour after deleting the expired device #", "the cas server self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\") # check that", "None: \"\"\"Test providing an issuer claim without requiring it in", "an http:// scheme because # FakeChannel.isSecure() returns False, so synapse", "SAML2 and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers with multiple", "config = super().default_config() config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"] = { \"enabled\":", "redirect to the SAML server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\"", "a proper logout self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id) channel = self.make_request(b\"GET\",", "param - ie, the # place that CAS will redirect", "to it rather than # serving a confirmation page config[\"sso\"]", "key. @skip_unless(HAS_JWT, \"requires jwt\") class JWTPubKeyTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets,", "List[str] = [] for link in p.links: path, query =", "first hit the redirect url, which should redirect to our", "has been defined (eg via @override_config), don't replace it. 
if", "the login fails with the correct error code self.assertEqual(channel.code, 400)", "the redirect url cookie_headers = channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies: Dict[str,", "is used to carry the client redirect url saml_uri_params =", "- ie, the # place that CAS will redirect to", "\"\"\" return ( \"\"\" <cas:serviceResponse xmlns:cas='http://www.yale.edu/tp/cas'> <cas:authenticationSuccess> <cas:user>%s</cas:user> <cas:proxyGrantingTicket>PGTIOU-84678-8a9d...</cas:proxyGrantingTicket> <cas:proxies>", "\"m.id.user\", \"user\": \"kermit\" + str(i)}, \"password\": \"<PASSWORD>\", } channel =", "302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # send a", "PUBLIC KEY-----\", ] ) # This key is used to", "# the RelayState is used to carry the client redirect", "shouldn't be able to make requests without an access token", "self.assertEqual(channel.json_body[\"soft_logout\"], False) def _delete_device( self, access_token: str, user_id: str, password:", "error template. 
self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires", "channel def test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\", \"monkey\") channel = self.jwt_login({\"sub\":", "\"\"\"If SAML is chosen, should redirect to the SAML server\"\"\"", "deactivated account should error.\"\"\" redirect_url = \"https://legit-site.com/\" # First login", "True) # # test behaviour after deleting the expired device", "302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url = location_headers[0]", "\"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT", "use # RSS256, with a public key configured in synapse", "}, } # default OIDC provider config[\"oidc_config\"] = TEST_OIDC_CONFIG #", "= self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id, username_mapping_sessions, \"session id not found in", "finally, submit the matrix login token to the login API,", "deactivated they are served an error template. 
self.assertEqual(channel.code, 403) self.assertIn(b\"SSO", "base_config = { \"enabled\": True, \"secret\": jwt_secret, \"algorithm\": jwt_algorithm, }", "RelayState is used to carry the client redirect url saml_uri_params", ") @staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str: prefix", "test_username_picker(self) -> None: \"\"\"Test the happy path of a username", "it\"\"\" channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\")", "where to redirect to SAML_SERVER = \"https://test.saml.server/idp/sso\" TEST_SAML_METADATA = \"\"\"", "= location_headers[0] cas_uri_path, cas_uri_query = cas_uri.split(\"?\", 1) # it should", "}, ) self.another_service = ApplicationService( id=\"another__identifier\", token=\"another_token\", hostname=\"example.com\", sender=\"@as2bot:example.com\", namespaces={", "= header[1].decode(\"utf8\") self.assertTrue(content_type_header_value.startswith(\"text/html\")) # Test that the body isn't empty.", "Dict[str, Any], secret: str = jwt_secret) -> str: # PyJWT", "str(len(content))), ], ) self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert", "channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( {", "and SSO flows\"\"\" channel = self.make_request(\"GET\", \"/_matrix/client/r0/login\") self.assertEqual(channel.code, 200, channel.result)", "access token, mxid, and device id. login_token = params[2][1] chan", "# ... and should have set a cookie including the", "# looks ok. 
username_mapping_sessions = self.hs.get_sso_handler()._username_mapping_sessions self.assertIn( session_id, username_mapping_sessions, \"session", "IdP, redirect to it\"\"\" channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result)", "class CASTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, ] def make_homeserver(self, reactor:", "create_requester(self.user_id) ) ) # Request the CAS ticket. cas_ticket_url =", "6): params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", \"user\":", "self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard logout", "is =, %26 is &, %2B is + TEST_CLIENT_REDIRECT_URL =", "AS_USER}, } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\", channel.result)", "\"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT", "\"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If the client pick a", "is not yet valid (nbf)\", ) def test_login_no_sub(self) -> None:", "b\"200\", channel.result) def test_login_appservice_user_bot(self) -> None: \"\"\"Test that the appservice", "to the username picker self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\")", "} # whitelist this client URI so we redirect straight", "config = self.default_config() config[\"public_baseurl\"] = ( config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\" )", "devices.register_servlets, lambda hs, http_server: WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self, reactor: MemoryReactor,", "TestHtmlParser from tests.unittest import HomeserverTestCase, 
override_config, skip_unless try: import jwt", "access_token=access_token ) self.assertEqual(channel.code, 401, channel.result) # check it's a UI-Auth", "test_login_jwt_not_before(self) -> None: now = int(time.time()) channel = self.jwt_login({\"sub\": \"frog\",", "= { \"enabled\": True, \"server_url\": CAS_SERVER, } cas_user_id = \"username\"", "we'll have to parse them params = urllib.parse.parse_qsl( query, keep_blank_values=True,", "self.make_request(b\"POST\", LOGIN_URL, params) if i == 5: self.assertEqual(channel.result[\"code\"], b\"429\", channel.result)", "returned redirect uri login_token = params[2][1] # finally, submit the", "if idp_prov is not None: endpoint += \"/\" + idp_prov", "to delete a device\"\"\" channel = self.make_request( b\"DELETE\", \"devices/\" +", "channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") def", "-> None: channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"],", "\"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # # test behaviour after deleting the", "path, query = p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will", "-> None: channel = self.jwt_login({\"sub\": \"frog\"}, self.bad_privatekey) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "\"frog\", \"nbf\": now + 3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "\"m.id.user\", \"user\": AS_USER}, } channel = self.make_request( b\"POST\", LOGIN_URL, params,", "False) def _delete_device( self, access_token: str, user_id: str, password: str,", "1) # it should redirect us to the login page", "in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" # public_baseurl for 
some", ") def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\", \"<PASSWORD>\") for i in", "jwt_secret = \"secret\" jwt_algorithm = \"HS256\" base_config = { \"enabled\":", "# And that it contains our redirect link self.assertIn(redirect_url, channel.result[\"body\"].decode(\"UTF-8\"))", "access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # #", "to call the fake CAS and serve the template. channel", "= self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_no_token(self)", "in as a different device access_token_2 = self.login(\"kermit\", \"monkey\") #", "\"@frog:test\") def test_login_no_token(self) -> None: params = {\"type\": \"org.matrix.login.jwt\"} channel", "key + \" = \" for caveat in macaroon.caveats: if", "self.assertEqual(channel.code, 200, channel.result) expected_flow_types = [ \"m.login.cas\", \"m.login.sso\", \"m.login.token\", \"m.login.password\",", "we need to set this manually as well \"address\": {\"per_second\":", "Foundation C.I.C. # # Licensed under the Apache License, Version", "cas_ticket_url) # Test that the response is HTML. 
self.assertEqual(channel.code, 200,", "normal access_token = self.login(\"kermit\", \"monkey\") # we should now be", "True self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms = [] self.hs.config.captcha.enable_registration_captcha = False", "\"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, ) # test that the login fails", "\"service_url\": \"https://matrix.goodserver.com:8448\", } config[\"saml2_config\"] = { \"sp_config\": { \"metadata\": {\"inline\":", "users must provide a token when using the appservice login", "as normal params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\",", "] } } ) def test_cas_redirect_whitelisted(self) -> None: \"\"\"Tests that", "OIDC is chosen, should redirect to the OIDC auth endpoint\"\"\"", "don't replace it. if config.get(\"jwt_config\") is None: config[\"jwt_config\"] = self.base_config", "<md:IDPSSODescriptor protocolSupportEnumeration=\"urn:oasis:names:tc:SAML:2.0:protocol\"> <md:SingleSignOnService Binding=\"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect\" Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" % {", "channel.json_body[\"flows\"]} self.assertCountEqual( flows[\"m.login.sso\"][\"identity_providers\"], [ {\"id\": \"cas\", \"name\": \"CAS\"}, {\"id\": \"saml\",", "channel.result) retry_after_ms = int(channel.json_body[\"retry_after_ms\"]) else: self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) # Since", "\"burst_count\": 10000}, } } ) def test_POST_ratelimiting_per_address(self) -> None: #", "config[\"saml2_config\"] = { \"sp_config\": { \"metadata\": {\"inline\": [TEST_SAML_METADATA]}, # use", "should serve a confirmation page self.assertEqual(channel.code, 200, channel.result) content_type_headers =", "Clock) -> HomeServer: self.base_url = \"https://matrix.goodserver.com/\" self.redirect_path = \"_synapse/client/login/sso/redirect/confirm\" 
config", "of a CAS server, copied from https://apereo.github.io/cas/5.0.x/protocol/CAS-Protocol-V2-Specification.html#26-proxyvalidate-cas-20 This needs to", ") def test_login_no_sub(self) -> None: channel = self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"],", "def get_raw(uri: str, args: Any) -> bytes: \"\"\"Return an example", "the SSO login flow serves a redirect for the given", "away expected_expiry = self.clock.time_msec() + (15 * 60 * 1000)", "proper logout self._delete_device(access_token_2, \"kermit\", \"monkey\", device_id) channel = self.make_request(b\"GET\", TEST_URL,", "\"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, } channel =", "\"\"\"Test reading user ID from a custom subject claim.\"\"\" channel", "# RSS256, with a public key configured in synapse as", "self.assertEqual(channel.json_body[\"errcode\"], \"M_NOT_FOUND\") def test_client_idp_redirect_to_oidc(self) -> None: \"\"\"If the client pick", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "be accepted by synapse. # Generated just like jwt_privatekey. bad_privatekey", "this manually as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, }", "... 
and we should be soft-logouted channel = self.make_request(b\"GET\", TEST_URL,", ") self.assertEqual(channel.code, 401, channel.result) # check it's a UI-Auth fail", "MemoryReactor, clock: Clock) -> HomeServer: self.hs = self.setup_test_homeserver() self.hs.config.registration.enable_registration =", "self.register_user(\"kermit\", \"<PASSWORD>\") for i in range(0, 6): params = {", "redirect to an identity picker\"\"\" # first hit the redirect", "= channel.headers.getRawHeaders(\"Set-Cookie\") assert cookie_headers cookies: Dict[str, str] = {} for", "as normal access_token = self.login(\"kermit\", \"monkey\") # we should now", "base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url)) query", "there should be a link for each href returned_idps: List[str]", "= self.clock.time_msec() + (15 * 60 * 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry,", "the login page of the SAML server self.assertEqual(saml_uri_path, SAML_SERVER) #", "# additional OIDC providers config[\"oidc_providers\"] = [ { \"idp_id\": \"idp1\",", "versions of twisted don't do form-parsing without a valid #", "b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(channel.json_body[\"error\"], \"Token field for JWT is", "async def get_raw(uri: str, args: Any) -> bytes: \"\"\"Return an", "to parse them params = urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True, errors=\"strict\"", "bad_privatekey = \"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"gLjmQD3jBUTz+/FndLSBvr3F4OHtGL9O/osCAwEAAQJAJqH0jZJW7Smzo9ShP02L\",", "return result def jwt_login(self, *args: Any) -> FakeChannel: params =", "request with the bad device_id channel = self.make_request( \"POST\", \"/_matrix/client/v3/login\",", "redirect URL.\"\"\" cas_ticket_url = ( 
\"/_matrix/client/r0/login/cas/ticket?redirectUrl=%s&ticket=ticket\" % (urllib.parse.quote(redirect_url)) ) #", "\"username\"}}) def test_login_custom_sub(self) -> None: \"\"\"Test reading user ID from", "\"user\": AS_USER}, } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"401\",", "they are served an error template. self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account", "still return a soft-logout self.reactor.advance(3600) channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token)", "rather than # serving a confirmation page config[\"sso\"] = {\"client_whitelist\":", "b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}}) def test_login_custom_sub(self)", "than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0 + 1.0)", "channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual(", "class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers with multiple SSO providers enabled\"\"\"", "validation failed: Token is missing the \"iss\" claim', ) def", "channel = self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None, ) # test", "None: now = int(time.time()) channel = self.jwt_login({\"sub\": \"frog\", \"nbf\": now", "self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT", "self.assertEqual(path, \"https://x\") # it will have url-encoded the params properly,", "self.make_request(b\"POST\", LOGIN_URL, params) 
self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": {", "[ {\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets =", "SYNAPSE_SERVER_PUBLIC_HOSTNAME)], ) @staticmethod def _get_value_from_macaroon(macaroon: pymacaroons.Macaroon, key: str) -> str:", "set this manually as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000},", "} } ) def test_POST_ratelimiting_per_account_failed_attempts(self) -> None: self.register_user(\"kermit\", \"<PASSWORD>\") for", "tests.handlers.test_saml import has_saml2 from tests.rest.client.utils import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server", "a 400\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code, 400,", "caveat.caveat_id[len(prefix) :] raise ValueError(\"No %s caveat in macaroon\" % (key,))", "60 * 1000) self.assertApproximates(session.expiry_time_ms, expected_expiry, tolerance=1000) # Now, submit a", "\"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") # An invalid issuer.", "channel = self.make_request(b\"POST\", \"/logout\", access_token=access_token) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config({\"session_lifetime\": \"24h\"})", "a public key configured in synapse as \"jwt_secret\", and tokens", "[\"profile\"], \"authorization_endpoint\": \"https://issuer1/auth\", \"token_endpoint\": \"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\":", "{ \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, } channel", "Any], secret: str = jwt_privatekey) -> str: # PyJWT 2.0.0", "in 
p.links: path, query = link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\") params", "from twisted.web.resource import Resource import synapse.rest.admin from synapse.appservice import ApplicationService", "we should now be able to make requests with the", "= cookies[\"oidc_session\"] macaroon = pymacaroons.Macaroon.deserialize(oidc_session_cookie) self.assertEqual( self._get_value_from_macaroon(macaroon, \"client_redirect_url\"), TEST_CLIENT_REDIRECT_URL, )", "\"metadata\": {\"inline\": [TEST_SAML_METADATA]}, # use the XMLSecurity backend to avoid", "True) # Now try to hard logout this session channel", "h.split(\";\")[0].split(\"=\", maxsplit=1) cookies[key] = value oidc_session_cookie = cookies[\"oidc_session\"] macaroon =", "redirect_url = \"https://legit-site.com/\" # First login (to create the user).", "self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\", \"username_mapping_session=\" + session_id)], ) self.assertEqual(chan.code, 302,", "# create a device_id longer than 512 characters device_id =", "-> None: \"\"\"Test validating the audience claim.\"\"\" # A valid", "200, channel.result) expected_flow_types = [ \"m.login.cas\", \"m.login.sso\", \"m.login.token\", \"m.login.password\", ]", "self.base_config return config def jwt_encode(self, payload: Dict[str, Any], secret: str", "type of jwt.encode from bytes to str. result: Union[bytes, str]", "stops Synapse trying to redirect to # https://.... BASE_URL =", "Version 2.0 (the \"License\"); # you may not use this", "\"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=oidc\", ) self.assertEqual(channel.code, 302, channel.result)", "because the corresponding Synapse code awaits on it. 
\"\"\" return", "None: channel = self.jwt_login({\"username\": \"root\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\")", "-> None: \"\"\"If OIDC is chosen, should redirect to the", "6000) self.reactor.advance(retry_after_ms / 1000.0) params = { \"type\": \"m.login.password\", \"identifier\":", "= channel.json_body[\"access_token\"] device_id = channel.json_body[\"device_id\"] # we should now be", "}}\"} }, } ] return config def create_resource_dict(self) -> Dict[str,", "\"monkey\") for i in range(0, 6): params = { \"type\":", ") # Not providing an audience. channel = self.jwt_login({\"sub\": \"kermit\"})", "will have url-encoded the params properly, so we'll have to", "entire # rc_login dict here, we need to set this", "self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) -> None: \"\"\"If OIDC is chosen,", "the SSO login flow serves a redirect to a whitelisted", "\"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\", \"TpIN8A4MBKmfZMWTLq6z05y/qjKyxb0CIQDYJxCwTEenIaEa4PdoJl+qmXFasVDN\", \"ZU0+XtNV7yul0wIhAMI9IhiStIjS2EppBa6RSlk+t1oxh2gUWlIh+YVQfZGRAiEA\", \"tqBR7qLZGJ5CVKxWmNhJZGt1QHoUtOch8t9C4IdOZ2g=\", \"-----END RSA PRIVATE KEY-----\", ]", "params = { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER},", "= self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", ) self.assertEqual(channel.code,", "oidc_uri = location_headers[0] oidc_uri_path, oidc_uri_query = oidc_uri.split(\"?\", 1) # it", "b\"200\", channel.result) @override_config( { \"rc_login\": { # Prevent the address", "an example response payload from a call to the `/proxyValidate`", "params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", 
channel.result) def test_login_appservice_wrong_user(self) -> None:", "the bad device_id channel = self.make_request( \"POST\", \"/_matrix/client/v3/login\", json.dumps(body).encode(\"utf8\"), custom_headers=None,", "covered by the default test homeserver config # which sets", "lower # than 1min. self.assertTrue(retry_after_ms < 6000) self.reactor.advance(retry_after_ms / 1000.0)", "= super().default_config() config[\"jwt_config\"] = { \"enabled\": True, \"secret\": self.jwt_pubkey, \"algorithm\":", "AS_USER}, } channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token )", "KEY-----\", ] ) def default_config(self) -> Dict[str, Any]: config =", "-> None: \"\"\"/login/sso/redirect should redirect to an identity picker\"\"\" #", "params) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) @override_config( { \"rc_login\": { # Prevent", "= self.jwt_login({\"sub\": \"kermit\", \"iss\": \"test-issuer\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\")", "\"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"audiences\": [\"test-audience\"]}}) def test_login_aud(self) -> None: \"\"\"Test", ") config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER, } cas_user_id", "a confirmation page config[\"sso\"] = {\"client_whitelist\": [\"https://x\"]} return config def", "# ones ADDITIONAL_LOGIN_FLOWS = [ {\"type\": \"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ]", "b\"bobby\"}).encode(\"utf8\") chan = self.make_request( \"POST\", path=picker_url, content=content, content_is_form=True, custom_headers=[ (\"Cookie\",", "(\"Cookie\", \"username_mapping_session=\" + session_id), # old versions of twisted don't", "-> Dict[str, Any]: config = super().default_config() # If jwt_config has", "def test_login_with_overly_long_device_id_fails(self) -> None: 
self.register_user(\"mickey\", \"<PASSWORD>\") # create a device_id", "\"params\", \"session\"}, channel.result, ) auth = { \"type\": \"m.login.password\", #", "by applicable law or agreed to in writing, software #", "self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers uri =", "p.feed(channel.text_body) p.close() # ... which should contain our redirect link", "import Resource import synapse.rest.admin from synapse.appservice import ApplicationService from synapse.rest.client", "account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets =", "ID from a custom subject claim.\"\"\" channel = self.jwt_login({\"username\": \"frog\"})", "cas_uri_query = cas_uri.split(\"?\", 1) # it should redirect us to", "config def jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey)", "path, query = location_headers[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it will", "test_session_can_hard_logout_all_sessions_after_being_soft_logged_out( self, ) -> None: self.register_user(\"kermit\", \"monkey\") # log in", "= self.helper.complete_oidc_auth(oidc_uri, cookies, {\"sub\": \"user1\"}) # that should serve a", "@override_config( { \"rc_login\": { \"account\": {\"per_second\": 0.17, \"burst_count\": 5}, #", "\"Requires SAML2 and OIDC\") class MultiSSOTestCase(unittest.HomeserverTestCase): \"\"\"Tests for homeservers with", "chosen, should redirect to the OIDC auth endpoint\"\"\" # pick", "claim.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\")", "\"user_mapping_provider\": { \"config\": {\"localpart_template\": \"{{ user.sub }}\"} }, } ]", "# ... 
with a username_mapping_session cookie cookies: Dict[str, str] =", "200, channel.result) content_type_header_value = \"\" for header in channel.result.get(\"headers\", []):", "{ \"rc_login\": { # Prevent the address login ratelimiter from", "channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers picker_url = location_headers[0] self.assertEqual(picker_url,", "the normal # ones ADDITIONAL_LOGIN_FLOWS = [ {\"type\": \"m.login.application_service\"}, {\"type\":", "\"fibble_wibble\"}, } channel = self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token )", ") self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) def test_login_appservice_wrong_as(self) -> None: \"\"\"Test that", "\"\"\"Tests that the SSO login flow serves a redirect to", "servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource, login.register_servlets, ] jwt_secret = \"secret\" jwt_algorithm", "\"identifier\": {\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"}, } channel = self.make_request( b\"POST\",", "test_login_jwt_valid(self) -> None: channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\") @override_config({\"jwt_config\": {**base_config, \"subject_claim\": \"username\"}}) def test_login_custom_sub(self) ->", "= self.default_config() config[\"public_baseurl\"] = ( config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"]", "} config[\"saml2_config\"] = { \"sp_config\": { \"metadata\": {\"inline\": [TEST_SAML_METADATA]}, #", "Location=\"%(SAML_SERVER)s\"/> </md:IDPSSODescriptor> </md:EntityDescriptor> \"\"\" % { \"SAML_SERVER\": SAML_SERVER, } LOGIN_URL", "test that the login fails with the correct error code", "cookies: Dict[str, str] = {} channel.extract_cookies(cookies) 
self.assertIn(\"username_mapping_session\", cookies) session_id =", "config # which sets these values to 10000, but as", "the CAS server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL)", "is not None: endpoint += \"/\" + idp_prov endpoint +=", "\"sso\": { \"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\", ] } } )", "to our idp picker channel = self._make_sso_redirect_request(None) self.assertEqual(channel.code, 302, channel.result)", "self.jwt_login({\"sub\": \"frog\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@frog:test\") def test_login_jwt_invalid_signature(self) ->", "= \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url)) query =", "\"\"\"Test reading user ID from the default subject claim.\"\"\" channel", "is chosen, should redirect to the CAS server\"\"\" channel =", "CAS and serve the template. channel = self.make_request(\"GET\", cas_ticket_url) #", "b\"401\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_MISSING_TOKEN\") # log in as normal params", "populate public_baseurl in some tests SYNAPSE_SERVER_PUBLIC_HOSTNAME = \"synapse\" # public_baseurl", "= get_raw self.hs = self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, ) return self.hs", "\"JWT validation failed: Signature has expired\" ) def test_login_jwt_not_before(self) ->", "[(\"<ab c>\", \"\"), ('q\" =+\"', '\"fö&=o\"')] # (possibly experimental) login", "channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.code, 200, channel.result) access_token =", "2019-2021 The Matrix.org Foundation C.I.C. # # Licensed under the", "pubkey. 
It is generated # with `openssl genrsa 512` (not", "= self.make_request( b\"POST\", LOGIN_URL, params, access_token=self.service.token ) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result)", "+ \"/login\") # check that the redirectUrl is correctly encoded", "an unknown IdP, return a 404\"\"\" channel = self._make_sso_redirect_request(\"xxx\") self.assertEqual(channel.code,", "device access_token_2 = self.login(\"kermit\", \"monkey\") # more requests with the", "server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=cas\",", "applicable law or agreed to in writing, software # distributed", "import TEST_OIDC_AUTH_ENDPOINT, TEST_OIDC_CONFIG from tests.server import FakeChannel from tests.test_utils.html_parsers import", "= Mock(spec=[\"get_raw\"]) mocked_http_client.get_raw.side_effect = get_raw self.hs = self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client,", "# # we now log in as a different device", "+ session_id)], ) self.assertEqual(chan.code, 302, chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert", "for link in p.links: path, query = link.split(\"?\", 1) self.assertEqual(path,", "from bytes to str. result: Union[str, bytes] = jwt.encode(payload, secret,", "failed: Invalid audience\" ) # Not providing an audience. 
channel", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid issuer\"", "[ {\"regex\": r\"@as2_user.*\", \"exclusive\": False} ], ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [],", "chan.result) location_headers = chan.headers.getRawHeaders(\"Location\") assert location_headers # send a request", "tests CAS_SERVER = \"https://fake.test\" # just enough to tell pysaml2", "else: self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) # Since we're ratelimiting at 1", "\"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) self.assertEqual(channel.result[\"code\"], b\"403\",", "channel.result) content_type_header_value = \"\" for header in channel.result.get(\"headers\", []): if", "return type of jwt.encode from bytes to str. result: Union[str,", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\" ) # Not", "None: \"\"\"An unknown IdP should cause a 400\"\"\" channel =", "logout.register_servlets, devices.register_servlets, lambda hs, http_server: WhoamiRestServlet(hs).register(http_server), ] def make_homeserver(self, reactor:", "\"name\": \"CAS\"}, {\"id\": \"saml\", \"name\": \"SAML\"}, {\"id\": \"oidc-idp1\", \"name\": \"IDP1\"},", "it in the configuration.\"\"\" channel = self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"})", "self.assertEqual(channel.json_body[\"soft_logout\"], True) # # test behaviour after deleting the expired", "1) path, query = p.links[0].split(\"?\", 1) self.assertEqual(path, \"https://x\") # it", "@override_config( { \"sso\": { \"client_whitelist\": [ \"https://legit-site.com/\", \"https://other-site.com/\", ] }", "= urlencode({b\"username\": b\"bobby\"}).encode(\"utf8\") chan = self.make_request( \"POST\", path=picker_url, content=content, content_is_form=True,", "XMLSecurity backend to avoid relying on xmlsec1 
\"crypto_backend\": \"XMLSecurity\", },", "empty. self.assertTrue(len(channel.result[\"body\"]) > 0) # And that it contains our", "test_login_with_overly_long_device_id_fails(self) -> None: self.register_user(\"mickey\", \"<PASSWORD>\") # create a device_id longer", "client pick a known IdP, redirect to it\"\"\" channel =", "= \"@%s:test\" % cas_user_id async def get_raw(uri: str, args: Any)", "device_id, access_token=access_token, content={\"auth\": auth}, ) self.assertEqual(channel.code, 200, channel.result) @override_config({\"session_lifetime\": \"24h\"})", "self.service = ApplicationService( id=\"unique_identifier\", token=\"some_token\", hostname=\"example.com\", sender=\"@asbot:example.com\", namespaces={ ApplicationService.NS_USERS: [", "page of the cas server self.assertEqual(cas_uri_path, CAS_SERVER + \"/login\") #", "to the username picker, which should serve a redirect #", "10000}, \"failed_attempts\": {\"per_second\": 0.17, \"burst_count\": 5}, } } ) def", "# the query params in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\",", "\"identifier\": {\"type\": \"m.id.user\", \"user\": AS_USER}, } channel = self.make_request(b\"POST\", LOGIN_URL,", "the the above # private key placed in foo.key (jwt_privatekey).", "-> None: now = int(time.time()) channel = self.jwt_login({\"sub\": \"frog\", \"nbf\":", "AS_USER = \"as_user_alice\" class AppserviceLoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ login.register_servlets, register.register_servlets,", "= \"username\" self.user_id = \"@%s:test\" % cas_user_id async def get_raw(uri:", "None: \"\"\"Test validating the issuer claim.\"\"\" # A valid issuer.", "channel = self.jwt_login({\"sub\": \"frog\", \"exp\": 864000}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"],", "3600) # ... 
and we should be soft-logouted channel =", "# You may obtain a copy of the License at", "the expired device # # we now log in as", "import synapse.rest.admin from synapse.appservice import ApplicationService from synapse.rest.client import devices,", "returned_idps.append(params[\"idp\"][0]) self.assertCountEqual(returned_idps, [\"cas\", \"oidc\", \"oidc-idp1\", \"saml\"]) def test_multi_sso_redirect_to_cas(self) -> None:", "contain our redirect link self.assertEqual(len(p.links), 1) path, query = p.links[0].split(\"?\",", "302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers uri = location_headers[0]", "-> None: self.deactivate_account_handler = hs.get_deactivate_account_handler() def test_cas_redirect_confirm(self) -> None: \"\"\"Tests", "= \"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url)) query = dict(urllib.parse.parse_qsl(url_parts[4])) query.update({\"redirectUrl\": redirect_url})", "test homeserver config # which sets these values to 10000,", "= self.jwt_login({\"sub\": \"frog\", \"nbf\": now + 3600}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "params = urllib.parse.parse_qsl( query, keep_blank_values=True, strict_parsing=True, errors=\"strict\" ) self.assertEqual(params[0:2], EXPECTED_CLIENT_REDIRECT_URL_PARAMS)", "Invalid audience\" ) # Not providing an audience. channel =", "have url-encoded the params properly, so we'll have to parse", "# it should redirect us to the auth page of", "b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], 'JWT validation failed: Token", "flow of SSO login\"\"\" servlets = [login.register_servlets] def default_config(self) ->", "ratelimiter from raising first # # This is normally covered", "before redirecting a user to the redirect URL. \"\"\" base_url", "that the username mapping session # looks ok. 
username_mapping_sessions =", "self.hs.config.registration.registrations_require_3pid = [] self.hs.config.registration.auto_join_rooms = [] self.hs.config.captcha.enable_registration_captcha = False return", "cause a 400\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=http://x&idp=xyz\", ) self.assertEqual(channel.code,", "to the client redirectUrl chan = self.make_request( \"GET\", path=location_headers[0], custom_headers=[(\"Cookie\",", "us some HTML channel = self.make_request(\"GET\", uri) self.assertEqual(channel.code, 200, channel.result)", "self.register_user(\"kermit\", \"monkey\") # log in as normal access_token = self.login(\"kermit\",", "providing an issuer. channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result)", "see the requested uri as # http://..., so using http", "Prevent the account login ratelimiter from raising first # #", "= saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param, TEST_CLIENT_REDIRECT_URL) def test_login_via_oidc(self) -> None: \"\"\"If OIDC", "This key is used to sign tokens that shouldn't be", "for f in ADDITIONAL_LOGIN_FLOWS] self.assertCountEqual( [f[\"type\"] for f in channel.json_body[\"flows\"]],", "the entire # rc_login dict here, we need to set", "secret: str = jwt_secret) -> str: # PyJWT 2.0.0 changed", "\"kermit\", \"aud\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"403\", channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"],", "in channel.result.get(\"headers\", []): if header[0] == b\"Content-Type\": content_type_header_value = header[1].decode(\"utf8\")", "\"issuer\": \"test-issuer\"}}) def test_login_iss(self) -> None: \"\"\"Test validating the issuer", "in TEST_CLIENT_REDIRECT_URL EXPECTED_CLIENT_REDIRECT_URL_PARAMS = [(\"<ab c>\", \"\"), ('q\" =+\"', '\"fö&=o\"')]", "200, channel.result) content_type_headers = 
channel.headers.getRawHeaders(\"Content-Type\") assert content_type_headers self.assertTrue(content_type_headers[-1].startswith(\"text/html\")) p =", "template. self.assertEqual(channel.code, 403) self.assertIn(b\"SSO account deactivated\", channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\")", "to it\"\"\" channel = self._make_sso_redirect_request(\"oidc\") self.assertEqual(channel.code, 302, channel.result) location_headers =", "query = link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\") params = urllib.parse.parse_qs(query) self.assertEqual(params[\"redirectUrl\"],", "using http in the public_baseurl stops Synapse trying to redirect", "\"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": \"fibble_wibble\"}, } channel =", "self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code, 401, channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True)", "+ urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\", ) self.assertEqual(channel.code, 302, channel.result) location_headers =", "+ \"&idp=oidc\", ) self.assertEqual(channel.code, 302, channel.result) location_headers = channel.headers.getRawHeaders(\"Location\") assert", "login_token = params[2][1] # finally, submit the matrix login token", "url_parts[4] = urllib.parse.urlencode(query) cas_ticket_url = urllib.parse.urlunparse(url_parts) # Get Synapse to", "\"SAML\"}, {\"id\": \"oidc-idp1\", \"name\": \"IDP1\"}, {\"id\": \"oidc\", \"name\": \"OIDC\"}, ],", "-> None: \"\"\"Tests that the SSO login flow serves a", "# we shouldn't be able to make requests without an", "channel.result) def test_login_appservice_wrong_as(self) -> None: \"\"\"Test that as users cannot", "of twisted don't do form-parsing without a valid # content-length", "test_login_jwt_valid_registered(self) -> None: self.register_user(\"kermit\", 
\"monkey\") channel = self.jwt_login({\"sub\": \"kermit\"}) self.assertEqual(channel.result[\"code\"],", "flow channel = self.helper.auth_via_oidc( {\"sub\": \"tester\", \"displayname\": \"Jonny\"}, TEST_CLIENT_REDIRECT_URL )", "we shouldn't be able to make requests without an access", "login.register_servlets, register.register_servlets, ] def make_homeserver(self, reactor: MemoryReactor, clock: Clock) ->", "service param - ie, the # place that CAS will", "\"JWT validation failed: Invalid audience\" ) def test_login_default_sub(self) -> None:", "the OIDC server self.assertEqual(oidc_uri_path, TEST_OIDC_AUTH_ENDPOINT) # ... and should have", "session = username_mapping_sessions[session_id] self.assertEqual(session.remote_user_id, \"tester\") self.assertEqual(session.display_name, \"Jonny\") self.assertEqual(session.client_redirect_url, TEST_CLIENT_REDIRECT_URL) #", "str(i)}, \"password\": \"<PASSWORD>\", } channel = self.make_request(b\"POST\", LOGIN_URL, params) if", "TEST_OIDC_CONFIG # additional OIDC providers config[\"oidc_providers\"] = [ { \"idp_id\":", "we're sure not to be bothered by the per-user #", "we should be soft-logouted channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token) self.assertEqual(channel.code,", "self.setup_test_homeserver( config=config, proxied_http_client=mocked_http_client, ) return self.hs def prepare(self, reactor: MemoryReactor,", "def test_login_aud(self) -> None: \"\"\"Test validating the audience claim.\"\"\" #", "content-length header. 
(\"Content-Length\", str(len(content))), ], ) self.assertEqual(chan.code, 302, chan.result) location_headers", "cas_ticket_url) self.assertEqual(channel.code, 302) location_headers = channel.headers.getRawHeaders(\"Location\") assert location_headers self.assertEqual(location_headers[0][: len(redirect_url)],", "returns False, so synapse will see the requested uri as", "channel.json_body[\"error\"], 'JWT validation failed: Token is missing the \"aud\" claim',", "in channel.json_body[\"flows\"]], expected_flow_types ) flows = {flow[\"type\"]: flow for flow", "ApplicationService.NS_ROOMS: [], ApplicationService.NS_ALIASES: [], }, ) self.hs.get_datastores().main.services_cache.append(self.service) self.hs.get_datastores().main.services_cache.append(self.another_service) return self.hs", "jwt_secret, \"algorithm\": jwt_algorithm, } def default_config(self) -> Dict[str, Any]: config", "in range(0, 6): params = { \"type\": \"m.login.password\", \"identifier\": {\"type\":", "\"\"\" base_url = \"/_matrix/client/r0/login/cas/ticket?redirectUrl\" redirect_url = \"https://dodgy-site.com/\" url_parts = list(urllib.parse.urlparse(base_url))", "self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Signature verification failed\", ) AS_USER", "def test_POST_ratelimiting_per_address(self) -> None: # Create different users so we're", "channel = self.make_request(b\"POST\", LOGIN_URL, params) return channel def test_login_jwt_valid_registered(self) ->", "channel.result[\"body\"]) @skip_unless(HAS_JWT, \"requires jwt\") class JWTTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource,", "\"m.login.application_service\"}, {\"type\": \"uk.half-shot.msc2778.login.application_service\"}, ] class LoginRestServletTestCase(unittest.HomeserverTestCase): servlets = [ synapse.rest.admin.register_servlets_for_client_rest_resource,", "params = { \"type\": \"m.login.password\", \"identifier\": {\"type\": \"m.id.user\", 
\"user\": \"kermit\"},", "\"Token field for JWT is missing\") # The JWTPubKeyTestCase is", "TEST_CLIENT_REDIRECT_URL ) # that should redirect to the username picker", "\"License\"); # you may not use this file except in", "\"<KEY> \"-----END PUBLIC KEY-----\", ] ) # This key is", "return d def test_get_login_flows(self) -> None: \"\"\"GET /login should return", "\"/_matrix/client/r0/login/sso/redirect\" if idp_prov is not None: endpoint += \"/\" +", "channel.result) self.assertEqual(channel.json_body[\"errcode\"], \"M_FORBIDDEN\") self.assertEqual( channel.json_body[\"error\"], \"JWT validation failed: Invalid audience\"", "as well \"address\": {\"per_second\": 10000, \"burst_count\": 10000}, \"failed_attempts\": {\"per_second\": 0.17,", "to /_matrix/client/r0/login/sso/redirect ... possibly specifying an IDP provider \"\"\" endpoint", "config[\"public_baseurl\"] = BASE_URL config[\"cas_config\"] = { \"enabled\": True, \"server_url\": CAS_SERVER,", "\"@%s:test\" % cas_user_id async def get_raw(uri: str, args: Any) ->", "server\"\"\" channel = self.make_request( \"GET\", \"/_synapse/client/pick_idp?redirectUrl=\" + urllib.parse.quote_plus(TEST_CLIENT_REDIRECT_URL) + \"&idp=saml\",", ") # This key is used to sign tokens that", "5}, # Prevent the account login ratelimiter from raising first", "\"\\n\".join( [ \"-----BEGIN RSA PRIVATE KEY-----\", \"<KEY>\", \"<KEY>\", \"<KEY>KVaZ/gTOM9+9MwlmhidrUOweKfB/\", \"kQIhAPZwHazbjo7dYlJs7wPQz1vd+aHSEH+3uQKIysebkmm3AiEA1nc6mDdmgiUq\",", "\"enabled\": True, \"secret\": jwt_secret, \"algorithm\": jwt_algorithm, } def default_config(self) ->", "path, query = link.split(\"?\", 1) self.assertEqual(path, \"pick_idp\") params = urllib.parse.parse_qs(query)", "of the user's sessions channel = self.make_request(b\"POST\", \"/logout/all\", access_token=access_token) self.assertEqual(channel.result[\"code\"],", "super().default_config() config[\"jwt_config\"] = { \"enabled\": True, \"secret\": self.jwt_pubkey, \"algorithm\": \"RS256\",", 
"self.assertEqual(channel.json_body[\"errcode\"], \"M_UNKNOWN_TOKEN\") self.assertEqual(channel.json_body[\"soft_logout\"], True) # Now try to hard log", "the address login ratelimiter from raising first # # This", "\"https://issuer1/token\", \"userinfo_endpoint\": \"https://issuer1/userinfo\", \"user_mapping_provider\": { \"config\": {\"localpart_template\": \"{{ user.sub }}\"}", "and should have set a cookie including the redirect url", "= self.jwt_login({\"sub\": \"kermit\", \"iss\": \"invalid\"}) self.assertEqual(channel.result[\"code\"], b\"200\", channel.result) self.assertEqual(channel.json_body[\"user_id\"], \"@kermit:test\")", "import build_synapse_client_resource_tree from synapse.server import HomeServer from synapse.types import create_requester", "to # https://.... BASE_URL = \"http://%s/\" % (SYNAPSE_SERVER_PUBLIC_HOSTNAME,) # CAS", "{ # Prevent the address login ratelimiter from raising first", "config.get(\"public_baseurl\") or \"https://matrix.goodserver.com:8448\" ) config[\"cas_config\"] = { \"enabled\": True, \"server_url\":", "from typing import Any, Dict, List, Optional, Union from unittest.mock", "from a custom subject claim.\"\"\" channel = self.jwt_login({\"username\": \"frog\"}) self.assertEqual(channel.result[\"code\"],", "self.assertEqual(channel.json_body[\"error\"], \"Token field for JWT is missing\") # The JWTPubKeyTestCase", "= { \"type\": login.LoginRestServlet.APPSERVICE_TYPE, \"identifier\": {\"type\": \"m.id.user\", \"user\": self.service.sender}, }", "None: \"\"\"Test reading user ID from a custom subject claim.\"\"\"", "which should serve a redirect # to the completion page", "to check it has fields assumed elsewhere in this class", "def test_multi_sso_redirect_to_saml(self) -> None: \"\"\"If SAML is chosen, should redirect", "fail self.assertEqual( set(channel.json_body.keys()), {\"flows\", \"params\", \"session\"}, channel.result, ) auth =", "super().default_config() # If jwt_config has been defined (eg via 
@override_config),", "tokens that shouldn't be accepted by synapse. # Generated just", "# which sets these values to 10000, but as we're", "and we should be soft-logouted channel = self.make_request(b\"GET\", TEST_URL, access_token=access_token)", "appservice login method \"\"\" self.register_appservice_user(AS_USER, self.service.token) params = { \"type\":", "Dict[str, Resource]: d = super().create_resource_dict() d.update(build_synapse_client_resource_tree(self.hs)) return d def test_get_login_flows(self)", "jwt_encode(self, payload: Dict[str, Any], secret: str = jwt_privatekey) -> str:", "-> None: \"\"\"Perform the UI-Auth to delete a device\"\"\" channel", "PUBLIC KEY-----\", \"<KEY>\", \"<KEY> \"-----END PUBLIC KEY-----\", ] ) #", "client redirect url saml_uri_params = urllib.parse.parse_qs(saml_uri_query) relay_state_param = saml_uri_params[\"RelayState\"][0] self.assertEqual(relay_state_param,", "if we delete that device, it will be a proper", "None: self.register_user(\"kermit\", \"monkey\") # we shouldn't be able to make" ]
[ "in range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery request submitted to {args.robot_type}')", "= args.dropoff for _ in range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery", "dispenser - parameters: [request_guid: xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior:", "argparse import sys from time import sleep import uuid import", "of robot', default='magni') args = parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher') publisher", "- parameters: [request_guid: xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior: -", "pickup_place_name: cssd_room pickup_behavior: - name: dispenser - parameters: [request_guid: xxx,", "str(uuid.uuid1()) request.pickup_place_name = args.pickup request.dropoff_place_name = args.dropoff for _ in", "request.dropoff_place_name = args.dropoff for _ in range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown()", "def main(argv = sys.argv): rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv) ''' #", "args.task_id else: request.task_id = 'delivery#' + str(uuid.uuid1()) request.pickup_place_name = args.pickup", "parameters: [request_guid: yyy, target_guid:otbot, transporter_type:mir] ''' parser = argparse.ArgumentParser() parser.add_argument('-p',", "rclpy.utilities.remove_ros_args(argv) ''' # Example request: task_id: randomid_001 items: [itemA, itemB....]", "sleep import uuid import rclpy from rmf_task_msgs.msg import Delivery def", "sys.argv): rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv) ''' # Example request: task_id:", "import argparse import sys from time import sleep import uuid", "parser = argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry', help='Start waypoint') parser.add_argument('-d', '--dropoff',", "sleep(0.5) rclpy.shutdown() print(f'Delivery 
request submitted to {args.robot_type}') if __name__ ==", "main(argv = sys.argv): rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv) ''' # Example", "= parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery, 'delivery_requests', 10)", "- parameters: [request_guid: yyy, target_guid:otbot, transporter_type:mir] ''' parser = argparse.ArgumentParser()", "= Delivery() if args.task_id: request.task_id = args.task_id else: request.task_id =", "= node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5) request = Delivery() if args.task_id:", "import Delivery def main(argv = sys.argv): rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv)", "items: [itemA, itemB....] pickup_place_name: cssd_room pickup_behavior: - name: dispenser -", "= args.task_id else: request.task_id = 'delivery#' + str(uuid.uuid1()) request.pickup_place_name =", "'--task-id', help='Task ID', default='', type=str) parser.add_argument('-r', '--robot-type', help='Type of robot',", "args.pickup request.dropoff_place_name = args.dropoff for _ in range(5): publisher.publish(request) sleep(0.5)", "= argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry', help='Start waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2',", "'delivery_requests', 10) sleep(0.5) request = Delivery() if args.task_id: request.task_id =", "args.task_id: request.task_id = args.task_id else: request.task_id = 'delivery#' + str(uuid.uuid1())", "target_guid:otbot, transporter_type:mir] ''' parser = argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry', help='Start", "from time import sleep import uuid import rclpy from rmf_task_msgs.msg", "request: task_id: randomid_001 items: [itemA, itemB....] 
pickup_place_name: cssd_room pickup_behavior: -", "default='magni') args = parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery,", "rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5) request = Delivery()", "ot_prep_room dropoff_behavior: - name: dispenser - parameters: [request_guid: yyy, target_guid:otbot,", "import rclpy from rmf_task_msgs.msg import Delivery def main(argv = sys.argv):", "transporter_type:mir] ''' parser = argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry', help='Start waypoint')", "cssd_room pickup_behavior: - name: dispenser - parameters: [request_guid: xxx, target_guid:cssdbot,", "help='Start waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish waypoint') parser.add_argument('-i', '--task-id', help='Task", "Delivery() if args.task_id: request.task_id = args.task_id else: request.task_id = 'delivery#'", "waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish waypoint') parser.add_argument('-i', '--task-id', help='Task ID',", "dropoff_behavior: - name: dispenser - parameters: [request_guid: yyy, target_guid:otbot, transporter_type:mir]", "request.pickup_place_name = args.pickup request.dropoff_place_name = args.dropoff for _ in range(5):", "''' # Example request: task_id: randomid_001 items: [itemA, itemB....] 
pickup_place_name:", "help='Type of robot', default='magni') args = parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher')", "+ str(uuid.uuid1()) request.pickup_place_name = args.pickup request.dropoff_place_name = args.dropoff for _", "publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery request submitted to {args.robot_type}') if __name__", "xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior: - name: dispenser -", "randomid_001 items: [itemA, itemB....] pickup_place_name: cssd_room pickup_behavior: - name: dispenser", "robot', default='magni') args = parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher') publisher =", "= rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5) request =", "10) sleep(0.5) request = Delivery() if args.task_id: request.task_id = args.task_id", "for _ in range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery request submitted", "else: request.task_id = 'delivery#' + str(uuid.uuid1()) request.pickup_place_name = args.pickup request.dropoff_place_name", "= rclpy.utilities.remove_ros_args(argv) ''' # Example request: task_id: randomid_001 items: [itemA,", "'--dropoff', default='hardware_2', help='Finish waypoint') parser.add_argument('-i', '--task-id', help='Task ID', default='', type=str)", "args.dropoff for _ in range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery request", "import sleep import uuid import rclpy from rmf_task_msgs.msg import Delivery", "dropoff_place_name: ot_prep_room dropoff_behavior: - name: dispenser - parameters: [request_guid: yyy,", "waypoint') parser.add_argument('-i', '--task-id', help='Task ID', default='', type=str) parser.add_argument('-r', '--robot-type', help='Type", "rmf_task_msgs.msg import Delivery def main(argv = sys.argv): 
rclpy.init(args=argv) args_without_ros =", "rclpy.shutdown() print(f'Delivery request submitted to {args.robot_type}') if __name__ == '__main__':", "from rmf_task_msgs.msg import Delivery def main(argv = sys.argv): rclpy.init(args=argv) args_without_ros", "default='pantry', help='Start waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish waypoint') parser.add_argument('-i', '--task-id',", "parser.add_argument('-i', '--task-id', help='Task ID', default='', type=str) parser.add_argument('-r', '--robot-type', help='Type of", "node = rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5) request", "yyy, target_guid:otbot, transporter_type:mir] ''' parser = argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry',", "node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5) request = Delivery() if args.task_id: request.task_id", "request.task_id = args.task_id else: request.task_id = 'delivery#' + str(uuid.uuid1()) request.pickup_place_name", "sys from time import sleep import uuid import rclpy from", "Delivery def main(argv = sys.argv): rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv) '''", "_ in range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery request submitted to", "'--pickup', default='pantry', help='Start waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish waypoint') parser.add_argument('-i',", "if args.task_id: request.task_id = args.task_id else: request.task_id = 'delivery#' +", "import sys from time import sleep import uuid import rclpy", "sleep(0.5) request = Delivery() if args.task_id: request.task_id = args.task_id else:", "# Example request: task_id: randomid_001 items: [itemA, itemB....] pickup_place_name: cssd_room", "task_id: randomid_001 items: [itemA, itemB....] 
pickup_place_name: cssd_room pickup_behavior: - name:", "- name: dispenser - parameters: [request_guid: xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name:", "argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry', help='Start waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish", "import uuid import rclpy from rmf_task_msgs.msg import Delivery def main(argv", "= sys.argv): rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv) ''' # Example request:", "Example request: task_id: randomid_001 items: [itemA, itemB....] pickup_place_name: cssd_room pickup_behavior:", "default='', type=str) parser.add_argument('-r', '--robot-type', help='Type of robot', default='magni') args =", "print(f'Delivery request submitted to {args.robot_type}') if __name__ == '__main__': main(sys.argv)", "publisher = node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5) request = Delivery() if", "args = parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery, 'delivery_requests',", "'delivery#' + str(uuid.uuid1()) request.pickup_place_name = args.pickup request.dropoff_place_name = args.dropoff for", "rclpy from rmf_task_msgs.msg import Delivery def main(argv = sys.argv): rclpy.init(args=argv)", "= 'delivery#' + str(uuid.uuid1()) request.pickup_place_name = args.pickup request.dropoff_place_name = args.dropoff", "args_without_ros = rclpy.utilities.remove_ros_args(argv) ''' # Example request: task_id: randomid_001 items:", "'--robot-type', help='Type of robot', default='magni') args = parser.parse_args(args_without_ros[1:]) node =", "parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish waypoint') parser.add_argument('-i', '--task-id', help='Task ID', default='',", "request.task_id = 'delivery#' + str(uuid.uuid1()) request.pickup_place_name = args.pickup request.dropoff_place_name =", 
"parameters: [request_guid: xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior: - name:", "pickup_behavior: - name: dispenser - parameters: [request_guid: xxx, target_guid:cssdbot, transporter_type:mir]", "transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior: - name: dispenser - parameters: [request_guid:", "time import sleep import uuid import rclpy from rmf_task_msgs.msg import", "- name: dispenser - parameters: [request_guid: yyy, target_guid:otbot, transporter_type:mir] '''", "[request_guid: xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior: - name: dispenser", "parser.parse_args(args_without_ros[1:]) node = rclpy.create_node('loop_request_publisher') publisher = node.create_publisher(Delivery, 'delivery_requests', 10) sleep(0.5)", "help='Task ID', default='', type=str) parser.add_argument('-r', '--robot-type', help='Type of robot', default='magni')", "ID', default='', type=str) parser.add_argument('-r', '--robot-type', help='Type of robot', default='magni') args", "target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room dropoff_behavior: - name: dispenser - parameters:", "type=str) parser.add_argument('-r', '--robot-type', help='Type of robot', default='magni') args = parser.parse_args(args_without_ros[1:])", "= args.pickup request.dropoff_place_name = args.dropoff for _ in range(5): publisher.publish(request)", "help='Finish waypoint') parser.add_argument('-i', '--task-id', help='Task ID', default='', type=str) parser.add_argument('-r', '--robot-type',", "dispenser - parameters: [request_guid: yyy, target_guid:otbot, transporter_type:mir] ''' parser =", "name: dispenser - parameters: [request_guid: xxx, target_guid:cssdbot, transporter_type:mir] dropoff_place_name: ot_prep_room", "[request_guid: yyy, target_guid:otbot, transporter_type:mir] ''' parser = argparse.ArgumentParser() parser.add_argument('-p', '--pickup',", "''' 
parser = argparse.ArgumentParser() parser.add_argument('-p', '--pickup', default='pantry', help='Start waypoint') parser.add_argument('-d',", "uuid import rclpy from rmf_task_msgs.msg import Delivery def main(argv =", "name: dispenser - parameters: [request_guid: yyy, target_guid:otbot, transporter_type:mir] ''' parser", "parser.add_argument('-r', '--robot-type', help='Type of robot', default='magni') args = parser.parse_args(args_without_ros[1:]) node", "default='hardware_2', help='Finish waypoint') parser.add_argument('-i', '--task-id', help='Task ID', default='', type=str) parser.add_argument('-r',", "parser.add_argument('-p', '--pickup', default='pantry', help='Start waypoint') parser.add_argument('-d', '--dropoff', default='hardware_2', help='Finish waypoint')", "<gh_stars>0 import argparse import sys from time import sleep import", "itemB....] pickup_place_name: cssd_room pickup_behavior: - name: dispenser - parameters: [request_guid:", "rclpy.init(args=argv) args_without_ros = rclpy.utilities.remove_ros_args(argv) ''' # Example request: task_id: randomid_001", "request = Delivery() if args.task_id: request.task_id = args.task_id else: request.task_id", "range(5): publisher.publish(request) sleep(0.5) rclpy.shutdown() print(f'Delivery request submitted to {args.robot_type}') if", "[itemA, itemB....] pickup_place_name: cssd_room pickup_behavior: - name: dispenser - parameters:" ]
[ "log.warning( f\"{route.endpoint} Has exceeded it's ratelimit ({lock.limit})! Reset in {lock.delta}", "logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages the global ratelimit\"\"\" def __init__(self) ->", "\"\"\" if bucket_hash := self._endpoints.get(route.rl_bucket): # we have seen this", "import aiohttp from aiohttp import BaseConnector, ClientSession, ClientWebSocketResponse, FormData from", "limit by throttling http requests if self.__session.closed: await self.login(self.token) async", "the interaction with discords http endpoints.\"\"\" import asyncio import logging", "= CooldownSystem( 45, 1 ) # global rate-limit is 50", "know which bucket it is associated with lock = self.ratelimit_locks.get(bucket_hash)", "this route before, we know which bucket it is associated", "self.__session.request(route.method, route.url, **kwargs) as response: result = await response_decode(response) self.ingest_ratelimit(route,", "-> Any: \"\"\" Make a request to discord. parameters: route:", "Route __all__ = [\"HTTPClient\"] log = logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages", "exists, return a new lock return BucketLock() def ingest_ratelimit(self, route:", "response elif response.status in {500, 502, 504}: # Server issues,", "result): log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status == 403: raise Forbidden(response, response_data=result,", "return await self.request(Route(\"GET\", \"/users/@me\")) except HTTPException as e: if e.status", "asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self, delta: float) -> None: \"\"\" Lock", "0.0 def __repr__(self) -> str: return f\"<BucketLock: {self.bucket_hash or 'Generic'}>\"", "\"\"\"Return True if lock is acquired.\"\"\" return self._lock.locked() def unlock(self)", "= True self.bucket_hash: Optional[str] = None self.limit: int = -1", ") await asyncio.sleep(1 + attempt * 2) continue if not", "async with self._lock: while not 
self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def", "requests {url} from CDN\") async with self.__session.get(url) as response: if", "self.__session.close() async def get_gateway(self) -> str: \"\"\"Get the gateway url.\"\"\"", "Route) -> BucketLock: \"\"\" Get a route's rate limit bucket.", "self.global_lock: GlobalLock = GlobalLock() self._max_attempts: int = 3 self.ratelimit_locks: WeakValueDictionary[str,", "Any: \"\"\" Make a request to discord. parameters: route: The", "None self.global_lock: GlobalLock = GlobalLock() self._max_attempts: int = 3 self.ratelimit_locks:", "*args) -> None: if self.unlock_on_exit and self._lock.locked(): self.unlock() self.unlock_on_exit =", "EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests,", "elif response.status == 404: raise NotFound(response, response_data=result, route=route) elif response.status", "Attach a reason to this request, used for audit logs", "the bucket hash has been set (ignores unlimited endpoints) log.debug(f\"Caching", "elif response.status in {500, 502, 504}: # Server issues, retry", "cached lock exists, return a new lock return BucketLock() def", "# We only ever try and cache the bucket if", "-> dict: \"\"\" \"Login\" to the gateway, basically validates the", "-> None: async with self._lock: while not self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time())", "self.token: Optional[str] = None self.global_lock: GlobalLock = GlobalLock() self._max_attempts: int", "reason: Attach a reason to this request, used for audit", "if isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" # sanity check", "if self.__session.closed: await self.login(self.token) async with self.__session.request(route.method, route.url, **kwargs) as", "**kwargs) as response: result = 
await response_decode(response) self.ingest_ratelimit(route, response.headers, lock)", "route before, we know which bucket it is associated with", "json: A json payload to send in the request reason:", "header: A header from a http response \"\"\" self.bucket_hash =", "and wait for unlock continue elif lock.remaining == 0: #", "to this request, used for audit logs \"\"\" # Assemble", "are infrequent we're doing well log.warning( f\"{route.endpoint} Has exceeded it's", "existing ratelimit for the respective buckethash # otherwise a brand-new", "{500, 502, 504}: # Server issues, retry log.warning( f\"{route.endpoint} Received", "we use 45 self._lock: asyncio.Lock = asyncio.Lock() async def rate_limit(self)", "self._lock: asyncio.Lock = asyncio.Lock() self.unlock_on_exit: bool = True self.bucket_hash: Optional[str]", "Ingests a ratelimit header from discord to determine ratelimit. Args:", "exceeded if result.get(\"global\", False): # if we get a global,", "before, we know which bucket it is associated with lock", "lock on this route, it'll still be in the cache", "to use returns: The currently logged in bot's data \"\"\"", "route \"\"\" if bucket_hash := self._endpoints.get(route.rl_bucket): # we have seen", "still be in the cache # return that lock return", "request_cdn(self, url, asset) -> bytes: log.debug(f\"{asset} requests {url} from CDN\")", "to connect to \"\"\" return await self.__session.ws_connect( url, timeout=30, max_msg_size=0,", "keep the lock acquired \"\"\" await self._lock.acquire() await asyncio.sleep(delta) self._lock.release()", "[\"HTTPClient\"] log = logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages the global ratelimit\"\"\"", "= {\"User-Agent\": self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\" if", "lock = self.ratelimit_locks.get(bucket_hash) if lock: # if we have an", "ratelimit for header: The rate limit header in question bucket_lock:", "InteractionRequests, MemberRequests, 
MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ):", "http response \"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\") or", "0.0)) async def blind_defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock but", "self.limit: int = -1 self.remaining: int = -1 self.delta: float", "dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" # sanity check payload if isinstance(data,", "bucket lock will be returned for attempt in range(self._max_attempts): async", "continue processing the current response elif response.status in {500, 502,", "raise async def close(self) -> None: \"\"\"Close the session.\"\"\" if", "\"\"\" Lock the global lock for a given duration. Args:", "response, route, result): log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status == 403: raise", "for audit logs \"\"\" # Assemble headers kwargs[\"headers\"] = {\"User-Agent\":", "async def request( self, route: Route, data: Absent[Union[dict, FormData]] =", "if we have an active lock on this route, it'll", "isinstance(x, dict) else x for x in data] elif isinstance(data,", "50 per second, conservatively we use 45 self._lock: asyncio.Lock =", "def request( self, route: Route, data: Absent[Union[dict, FormData]] = MISSING,", "Has exceeded it's ratelimit ({lock.limit})! 
Reset in {lock.delta} seconds\" )", "response_data=result, route=route) else: raise HTTPException(response, response_data=result, route=route) async def request_cdn(self,", "InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, )", "return a new lock return BucketLock() def ingest_ratelimit(self, route: Route,", "Forbidden, GatewayNotFound, HTTPException, NotFound, LoginError from dis_snek.client.utils.input_utils import response_decode from", "self.__session.closed: self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route: Route) -> BucketLock: \"\"\" Get", "token to use returns: The currently logged in bot's data", "these are infrequent we're doing well log.warning( f\"{route.endpoint} Has exceeded", "or 'Generic'}>\" @property def locked(self) -> bool: \"\"\"Return True if", "logger_name, MISSING, Absent from dis_snek.client.errors import DiscordError, Forbidden, GatewayNotFound, HTTPException,", "\"\"\" await self._lock.acquire() await asyncio.sleep(delta) self._lock.release() class BucketLock: \"\"\"Manages the", "self.ratelimit_locks.get(bucket_hash) if lock: # if we have an active lock", "bucket if the bucket hash has been set (ignores unlimited", "aiohttp import BaseConnector, ClientSession, ClientWebSocketResponse, FormData from multidict import CIMultiDictProxy", "the bucket if the bucket hash has been set (ignores", "= bucket_lock async def request( self, route: Route, data: Absent[Union[dict,", "* 2} seconds\" ) await asyncio.sleep(1 + attempt * 2)", "asyncio import logging from typing import Any, Dict, Optional, Union", "= asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async def defer_unlock(self) -> None: \"\"\"Unlocks", "response_data=result, route=route) elif response.status >= 500: raise DiscordError(response, response_data=result, route=route)", "await self.__session.ws_connect( url, timeout=30, 
max_msg_size=0, autoclose=False, headers={\"User-Agent\": self.user_agent}, compress=0 )", "<filename>dis_snek/api/http/http_client.py \"\"\"This file handles the interaction with discords http endpoints.\"\"\"", "the ratelimit for each bucket\"\"\" def __init__(self) -> None: self._lock:", "= True class HTTPClient( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests,", "not self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self, delta: float) ->", "WeakValueDictionary() self._endpoints = {} self.user_agent: str = ( f\"DiscordBot ({__repo_url__}", "Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def __del__(self): if self.__session and not self.__session.closed:", "# if we have an active lock on this route,", "it is associated with lock = self.ratelimit_locks.get(bucket_hash) if lock: #", "isinstance(data, list): kwargs[\"json\"] = [dict_filter_missing(x) if isinstance(x, dict) else x", "global, that's pretty bad, this would usually happen if the", "gateway, basically validates the token and grabs user data. parameters:", "\"\"\" Ingests a discord rate limit header to configure this", "wait for completion.\"\"\" self.unlock_on_exit = False loop = asyncio.get_running_loop() loop.call_later(self.delta,", "bucket, lock until reset log.debug( f\"{route.endpoint} Has exhausted its ratelimit", "@property def locked(self) -> bool: \"\"\"Return True if lock is", "BucketLock: \"\"\" Get a route's rate limit bucket. 
Args: route:", "int = 3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock] = WeakValueDictionary() self._endpoints =", "from dis_snek.client.utils.serializer import dict_filter_missing from dis_snek.models import CooldownSystem from .route", "associated with lock = self.ratelimit_locks.get(bucket_hash) if lock: # if we", "The route we're ingesting ratelimit for header: The rate limit", "_uriquote(reason, safe=\"/ \") if isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\"", "a brand-new bucket lock will be returned for attempt in", "= await response_decode(response) self.ingest_ratelimit(route, response.headers, lock) if response.status == 429:", "\"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: # We only ever try and", "bucket. Args: route: The route to fetch the ratelimit bucket", "await self.global_lock.rate_limit() # prevent us exceeding the global rate limit", ") def __del__(self): if self.__session and not self.__session.closed: self.loop.run_until_complete(self.__session.close()) def", "for the respective buckethash # otherwise a brand-new bucket lock", "def _raise_exception(self, response, route, result): log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status ==", "from dis_snek.client.const import __py_version__, __repo_url__, __version__, logger_name, MISSING, Absent from", "self.login(self.token) async with self.__session.request(route.method, route.url, **kwargs) as response: result =", "# this gets a BucketLock for this route. 
# If", "self.unlock_on_exit = False loop = asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async def", "unlock continue elif lock.remaining == 0: # Last call available", "-> None: \"\"\" Ingests a discord rate limit header to", "try and cache the bucket if the bucket hash has", "# Last call available in the bucket, lock until reset", "self.bucket_hash: Optional[str] = None self.limit: int = -1 self.remaining: int", "token and grabs user data. parameters: token: the token to", "bad, this would usually happen if the user is hitting", "Absent[str] = MISSING, **kwargs: Dict[str, Any], ) -> Any: \"\"\"", "def __init__(self, connector: Optional[BaseConnector] = None, loop: Optional[asyncio.AbstractEventLoop] = None):", "has exceeded global ratelimit, locking REST API for {result.get('retry_after')} seconds\"", "discord. parameters: route: The route to take json: A json", "is associated with lock = self.ratelimit_locks.get(bucket_hash) if lock: # if", "asyncio.Lock = asyncio.Lock() async def rate_limit(self) -> None: async with", "Last call available in the bucket, lock until reset log.debug(", "reset log.debug( f\"{route.endpoint} Has exhausted its ratelimit ({lock.limit})! 
Locking route", "{url} from CDN\") async with self.__session.get(url) as response: if response.status", "# Assemble headers kwargs[\"headers\"] = {\"User-Agent\": self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"]", "response_decode(response)) async def login(self, token: str) -> dict: \"\"\" \"Login\"", "except HTTPException as exc: raise GatewayNotFound from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"],", "404: raise NotFound(response, response_data=result, route=route) elif response.status >= 500: raise", "\"\"\"Close the session.\"\"\" if self.__session: await self.__session.close() async def get_gateway(self)", "brand-new bucket lock will be returned for attempt in range(self._max_attempts):", "self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def blind_defer_unlock(self) -> None: \"\"\"Unlocks", "we have an active lock on this route, it'll still", "CooldownSystem = CooldownSystem( 45, 1 ) # global rate-limit is", "with self._lock: while not self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self,", "response \"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\") or -1)", "have seen this route before, we know which bucket it", "Assemble headers kwargs[\"headers\"] = {\"User-Agent\": self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"] =", "= dict_filter_missing(data) elif isinstance(data, FormData): kwargs[\"data\"] = data lock =", "response: result = await response_decode(response) self.ingest_ratelimit(route, response.headers, lock) if response.status", "True self.bucket_hash: Optional[str] = None self.limit: int = -1 self.remaining:", "= None self.global_lock: GlobalLock = GlobalLock() self._max_attempts: int = 3", "current response elif response.status in {500, 502, 504}: # Server", "x in data] elif isinstance(data, dict): kwargs[\"json\"] = 
dict_filter_missing(data) elif", "locking REST API for {result.get('retry_after')} seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\"))) continue", "bucket for Returns: The BucketLock object for this route \"\"\"", "if isinstance(x, dict) else x for x in data] elif", "data lock = self.get_ratelimit(route) # this gets a BucketLock for", "= self.ratelimit_locks.get(bucket_hash) if lock: # if we have an active", "-1 self.remaining: int = -1 self.delta: float = 0.0 def", "route: The route we're ingesting ratelimit for header: The rate", "None: \"\"\" Lock the global lock for a given duration.", "True if lock is acquired.\"\"\" return self._lock.locked() def unlock(self) ->", "GlobalLock = GlobalLock() self._max_attempts: int = 3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock]", "an active lock on this route, it'll still be in", "elif isinstance(data, FormData): kwargs[\"data\"] = data lock = self.get_ratelimit(route) #", "kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/ \") if isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"]", "the user is hitting the api from 2 clients sharing", "lock until reset log.debug( f\"{route.endpoint} Has exhausted its ratelimit ({lock.limit})!", "are unfortunately unavoidable, but we can attempt to avoid them", "in the request reason: Attach a reason to this request,", "async def defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock after a", "doesn't wait for completion.\"\"\" self.unlock_on_exit = False loop = asyncio.get_running_loop()", "connector self.loop = asyncio.get_event_loop() if loop is None else loop", "the request reason: Attach a reason to this request, used", "logs \"\"\" # Assemble headers kwargs[\"headers\"] = {\"User-Agent\": self.user_agent} if", "lock.blind_defer_unlock() # lock this route, but continue processing the current", "client for sending requests to the Discord API.\"\"\" def __init__(self,", "the token and grabs user data. 
parameters: token: the token", "request, used for audit logs \"\"\" # Assemble headers kwargs[\"headers\"]", "async def _raise_exception(self, response, route, result): log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status", "= None): self.connector: Optional[BaseConnector] = connector self.loop = asyncio.get_event_loop() if", "== 404: raise NotFound(response, response_data=result, route=route) elif response.status >= 500:", "self.__session.get(url) as response: if response.status == 200: return await response.read()", "typing import Any, Dict, Optional, Union from urllib.parse import quote", "BucketLock after a specified delay.\"\"\" self.unlock_on_exit = False await asyncio.sleep(self.delta)", "range(self._max_attempts): async with lock: try: await self.global_lock.rate_limit() # prevent us", "for {lock.delta} seconds\" ) await lock.blind_defer_unlock() # lock this route,", "self._lock.locked() def unlock(self) -> None: \"\"\"Unlock this bucket.\"\"\" self._lock.release() def", "-> None: \"\"\"Unlocks the BucketLock but doesn't wait for completion.\"\"\"", "WebhookRequests, ScheduledEventsRequests, ): \"\"\"A http client for sending requests to", ">= 200: await self._raise_exception(response, route, result) log.debug( f\"{route.endpoint} Received {response.status}", "self._raise_exception(response, route, result) log.debug( f\"{route.endpoint} Received {response.status} :: [{lock.remaining}/{lock.limit} calls", "def __repr__(self) -> str: return f\"<BucketLock: {self.bucket_hash or 'Generic'}>\" @property", "+ attempt * 2) continue raise async def _raise_exception(self, response,", "this request, used for audit logs \"\"\" # Assemble headers", "# otherwise a brand-new bucket lock will be returned for", "hitting the api from 2 clients sharing a token log.error(", "2) continue if not 300 > response.status >= 200: await", "(ignores unlimited endpoints) log.debug(f\"Caching ingested rate limit data for: {bucket_lock.bucket_hash}\")", 
"raise NotFound(response, response_data=result, route=route) elif response.status >= 500: raise DiscordError(response,", "returns: The currently logged in bot's data \"\"\" self.__session =", "lock.remaining == 0: # Last call available in the bucket,", "this would usually happen if the user is hitting the", "well log.warning( f\"{route.endpoint} Has exceeded it's ratelimit ({lock.limit})! Reset in", "ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests,", "and self._lock.locked(): self.unlock() self.unlock_on_exit = True class HTTPClient( BotRequests, ChannelRequests,", "user data. parameters: token: the token to use returns: The", "return lock # if no cached lock exists, return a", "raise async def _raise_exception(self, response, route, result): log.error(f\"{route.method}::{route.url}: {response.status}\") if", "async def blind_defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock but doesn't", "WeakValueDictionary import aiohttp from aiohttp import BaseConnector, ClientSession, ClientWebSocketResponse, FormData", "Dict, Optional, Union from urllib.parse import quote as _uriquote from", "if result.get(\"global\", False): # if we get a global, that's", "ratelimit ({lock.limit})! Reset in {lock.delta} seconds\" ) await lock.defer_unlock() #", "a route's rate limit bucket. Args: route: The route to", "global ratelimit, locking REST API for {result.get('retry_after')} seconds\" ) await", "elif response.status >= 500: raise DiscordError(response, response_data=result, route=route) else: raise", "def get_gateway(self) -> str: \"\"\"Get the gateway url.\"\"\" try: data:", "self.__session = ClientSession(connector=self.connector) self.token = token try: return await self.request(Route(\"GET\",", "this bucket lock. 
Args: header: A header from a http", "int = -1 self.delta: float = 0.0 def __repr__(self) ->", "= \"application/json\" # sanity check payload if isinstance(data, list): kwargs[\"json\"]", "we're ingesting ratelimit for header: The rate limit header in", "we can attempt to avoid them # so long as", "asyncio.sleep(delta) self._lock.release() class BucketLock: \"\"\"Manages the ratelimit for each bucket\"\"\"", "log.debug( f\"{route.endpoint} Received {response.status} :: [{lock.remaining}/{lock.limit} calls remaining]\" ) return", "self._lock: while not self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self, delta:", "a specified delay.\"\"\" self.unlock_on_exit = False await asyncio.sleep(self.delta) self.unlock() async", "> response.status >= 200: await self._raise_exception(response, route, result) log.debug( f\"{route.endpoint}", "= token try: return await self.request(Route(\"GET\", \"/users/@me\")) except HTTPException as", "await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: # 429's are unfortunately unavoidable, but", "\"\"\"Manages the global ratelimit\"\"\" def __init__(self) -> None: self.cooldown_system: CooldownSystem", "data \"\"\" self.__session = ClientSession(connector=self.connector) self.token = token try: return", "lock. 
Args: header: A header from a http response \"\"\"", "None: \"\"\" Ingests a ratelimit header from discord to determine", "+ attempt * 2) continue if not 300 > response.status", "aiohttp from aiohttp import BaseConnector, ClientSession, ClientWebSocketResponse, FormData from multidict", "import BaseConnector, ClientSession, ClientWebSocketResponse, FormData from multidict import CIMultiDictProxy from", "bucket_lock async def request( self, route: Route, data: Absent[Union[dict, FormData]]", "retrying in {1 + attempt * 2} seconds\" ) await", "self.unlock() async def __aenter__(self) -> None: await self._lock.acquire() async def", "Dict[str, Any], ) -> Any: \"\"\" Make a request to", "await lock.blind_defer_unlock() # lock this route, but continue processing the", "200: return await response.read() await self._raise_exception(response, asset, await response_decode(response)) async", "reason not in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/ \")", "return await response.read() await self._raise_exception(response, asset, await response_decode(response)) async def", "else x for x in data] elif isinstance(data, dict): kwargs[\"json\"]", "async def login(self, token: str) -> dict: \"\"\" \"Login\" to", "\"\"\" Connect to the websocket. parameters: url: the url to", "{__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def __del__(self): if self.__session and not", "would usually happen if the user is hitting the api", "-1 self.delta: float = 0.0 def __repr__(self) -> str: return", "None self.limit: int = -1 self.remaining: int = -1 self.delta:", "doing well log.warning( f\"{route.endpoint} Has exceeded it's ratelimit ({lock.limit})! 
Reset", "await self._raise_exception(response, route, result) log.debug( f\"{route.endpoint} Received {response.status} :: [{lock.remaining}/{lock.limit}", "asyncio.Lock() self.unlock_on_exit: bool = True self.bucket_hash: Optional[str] = None self.limit:", "False loop = asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async def defer_unlock(self) ->", "Args: route: The route we're ingesting ratelimit for header: The", "{\"User-Agent\": self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\" if reason", ") from dis_snek.client.const import __py_version__, __repo_url__, __version__, logger_name, MISSING, Absent", "bucket_hash := self._endpoints.get(route.rl_bucket): # we have seen this route before,", "Absent from dis_snek.client.errors import DiscordError, Forbidden, GatewayNotFound, HTTPException, NotFound, LoginError", "bucket_lock: The rate limit bucket for this route \"\"\" bucket_lock.ingest_ratelimit_header(header)", "and grabs user data. 
parameters: token: the token to use", "self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async def request( self,", "lock: try: await self.global_lock.rate_limit() # prevent us exceeding the global", "delay.\"\"\" self.unlock_on_exit = False await asyncio.sleep(self.delta) self.unlock() async def __aenter__(self)", "with self.__session.request(route.method, route.url, **kwargs) as response: result = await response_decode(response)", "handles the interaction with discords http endpoints.\"\"\" import asyncio import", "can attempt to avoid them # so long as these", "self._lock.release() def ingest_ratelimit_header(self, header: CIMultiDictProxy) -> None: \"\"\" Ingests a", "dict_filter_missing from dis_snek.models import CooldownSystem from .route import Route __all__", "Forbidden(response, response_data=result, route=route) elif response.status == 404: raise NotFound(response, response_data=result,", "self.request(Route(\"GET\", \"/users/@me\")) except HTTPException as e: if e.status == 401:", "__init__(self) -> None: self.cooldown_system: CooldownSystem = CooldownSystem( 45, 1 )", "FormData from multidict import CIMultiDictProxy from dis_snek.api.http.http_requests import ( BotRequests,", "for {result.get('retry_after')} seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: # 429's", "for a given duration. Args: delta: The time to keep", "__repr__(self) -> str: return f\"<BucketLock: {self.bucket_hash or 'Generic'}>\" @property def", "acquired \"\"\" await self._lock.acquire() await asyncio.sleep(delta) self._lock.release() class BucketLock: \"\"\"Manages", "but continue processing the current response elif response.status in {500,", "validates the token and grabs user data. 
parameters: token: the", "Absent[Union[dict, FormData]] = MISSING, reason: Absent[str] = MISSING, **kwargs: Dict[str,", "route, result) log.debug( f\"{route.endpoint} Received {response.status} :: [{lock.remaining}/{lock.limit} calls remaining]\"", "from dis_snek.models import CooldownSystem from .route import Route __all__ =", "= asyncio.Lock() self.unlock_on_exit: bool = True self.bucket_hash: Optional[str] = None", "dict) else x for x in data] elif isinstance(data, dict):", "await asyncio.sleep(1 + attempt * 2) continue raise async def", "route: Route, header: CIMultiDictProxy, bucket_lock: BucketLock) -> None: \"\"\" Ingests", "continue raise async def _raise_exception(self, response, route, result): log.error(f\"{route.method}::{route.url}: {response.status}\")", "ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ) from dis_snek.client.const import", "question bucket_lock: The rate limit bucket for this route \"\"\"", "data: Absent[Union[dict, FormData]] = MISSING, reason: Absent[str] = MISSING, **kwargs:", "data: dict = await self.request(Route(\"GET\", \"/gateway\")) except HTTPException as exc:", "None: \"\"\"Unlocks the BucketLock after a specified delay.\"\"\" self.unlock_on_exit =", "ClientSession(connector=self.connector) self.token = token try: return await self.request(Route(\"GET\", \"/users/@me\")) except", "lock acquired \"\"\" await self._lock.acquire() await asyncio.sleep(delta) self._lock.release() class BucketLock:", "asyncio.sleep(1 + attempt * 2) continue if not 300 >", "-> ClientWebSocketResponse: \"\"\" Connect to the websocket. parameters: url: the", "defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock after a specified delay.\"\"\"", "504}: # Server issues, retry log.warning( f\"{route.endpoint} Received {response.status}... 
retrying", "self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\") or -1) self.remaining =", "MISSING self.token: Optional[str] = None self.global_lock: GlobalLock = GlobalLock() self._max_attempts:", "( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests,", "_uriquote from weakref import WeakValueDictionary import aiohttp from aiohttp import", "in the cache # return that lock return lock #", "self.ingest_ratelimit(route, response.headers, lock) if response.status == 429: # ratelimit exceeded", "buckethash # otherwise a brand-new bucket lock will be returned", "but doesn't wait for completion.\"\"\" self.unlock_on_exit = False loop =", "until reset log.debug( f\"{route.endpoint} Has exhausted its ratelimit ({lock.limit})! Locking", "= int(header.get(\"x-ratelimit-limit\") or -1) self.remaining = int(header.get(\"x-ratelimit-remaining\") or -1) self.delta", "f\"{route.endpoint} Has exhausted its ratelimit ({lock.limit})! Locking route for {lock.delta}", ") -> Any: \"\"\" Make a request to discord. 
parameters:", "route, result): log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status == 403: raise Forbidden(response,", "weakref import WeakValueDictionary import aiohttp from aiohttp import BaseConnector, ClientSession,", "await self._lock.acquire() async def __aexit__(self, *args) -> None: if self.unlock_on_exit", "kwargs[\"json\"] = dict_filter_missing(data) elif isinstance(data, FormData): kwargs[\"data\"] = data lock", "await self._lock.acquire() await asyncio.sleep(delta) self._lock.release() class BucketLock: \"\"\"Manages the ratelimit", "this endpoint has been used before, it will get an", "Optional, Union from urllib.parse import quote as _uriquote from weakref", "before, it will get an existing ratelimit for the respective", "it'll still be in the cache # return that lock", "and not self.__session.closed: self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route: Route) -> BucketLock:", "kwargs[\"headers\"] = {\"User-Agent\": self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\"", "quote as _uriquote from weakref import WeakValueDictionary import aiohttp from", "dis_snek.api.http.http_requests import ( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests,", "route and wait for unlock continue elif lock.remaining == 0:", "CooldownSystem from .route import Route __all__ = [\"HTTPClient\"] log =", "return that lock return lock # if no cached lock", "given duration. 
Args: delta: The time to keep the lock", "self._endpoints.get(route.rl_bucket): # we have seen this route before, we know", "has been set (ignores unlimited endpoints) log.debug(f\"Caching ingested rate limit", "unavoidable, but we can attempt to avoid them # so", "MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ): \"\"\"A", "MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/ \") if isinstance(data, (list, dict)):", "# so long as these are infrequent we're doing well", "the current response elif response.status in {500, 502, 504}: #", "DiscordError(response, response_data=result, route=route) else: raise HTTPException(response, response_data=result, route=route) async def", "DiscordError, Forbidden, GatewayNotFound, HTTPException, NotFound, LoginError from dis_snek.client.utils.input_utils import response_decode", "\"\"\"Manages the ratelimit for each bucket\"\"\" def __init__(self) -> None:", "unlock(self) -> None: \"\"\"Unlock this bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self, header:", "as these are infrequent we're doing well log.warning( f\"{route.endpoint} Has", "exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def websocket_connect(self, url: str) ->", "# ratelimit exceeded if result.get(\"global\", False): # if we get", "str) -> ClientWebSocketResponse: \"\"\" Connect to the websocket. parameters: url:", "the lock acquired \"\"\" await self._lock.acquire() await asyncio.sleep(delta) self._lock.release() class", "with lock: try: await self.global_lock.rate_limit() # prevent us exceeding the", "import __py_version__, __repo_url__, __version__, logger_name, MISSING, Absent from dis_snek.client.errors import", "a request to discord. 
parameters: route: The route to take", "data] elif isinstance(data, dict): kwargs[\"json\"] = dict_filter_missing(data) elif isinstance(data, FormData):", "response.read() await self._raise_exception(response, asset, await response_decode(response)) async def login(self, token:", "the BucketLock after a specified delay.\"\"\" self.unlock_on_exit = False await", "headers kwargs[\"headers\"] = {\"User-Agent\": self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot", "\") if isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" # sanity", "\"\"\"A http client for sending requests to the Discord API.\"\"\"", "Reset in {lock.delta} seconds\" ) await lock.defer_unlock() # lock this", "e raise async def close(self) -> None: \"\"\"Close the session.\"\"\"", "rate limit bucket. Args: route: The route to fetch the", "not 300 > response.status >= 200: await self._raise_exception(response, route, result)", "await self._raise_exception(response, asset, await response_decode(response)) async def login(self, token: str)", "use 45 self._lock: asyncio.Lock = asyncio.Lock() async def rate_limit(self) ->", "gateway url.\"\"\" try: data: dict = await self.request(Route(\"GET\", \"/gateway\")) except", "to discord. 
parameters: route: The route to take json: A", "300 > response.status >= 200: await self._raise_exception(response, route, result) log.debug(", "import CooldownSystem from .route import Route __all__ = [\"HTTPClient\"] log", "# if no cached lock exists, return a new lock", "not in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/ \") if", "async def rate_limit(self) -> None: async with self._lock: while not", "get_gateway(self) -> str: \"\"\"Get the gateway url.\"\"\" try: data: dict", "token try: return await self.request(Route(\"GET\", \"/users/@me\")) except HTTPException as e:", "asyncio.Lock = asyncio.Lock() self.unlock_on_exit: bool = True self.bucket_hash: Optional[str] =", "ClientSession, ClientWebSocketResponse, FormData from multidict import CIMultiDictProxy from dis_snek.api.http.http_requests import", "float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def blind_defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock", "\"\"\"Unlock this bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self, header: CIMultiDictProxy) -> None:", "session.\"\"\" if self.__session: await self.__session.close() async def get_gateway(self) -> str:", "BaseConnector, ClientSession, ClientWebSocketResponse, FormData from multidict import CIMultiDictProxy from dis_snek.api.http.http_requests", "-> None: \"\"\"Unlocks the BucketLock after a specified delay.\"\"\" self.unlock_on_exit", "class GlobalLock: \"\"\"Manages the global ratelimit\"\"\" def __init__(self) -> None:", "-> bool: \"\"\"Return True if lock is acquired.\"\"\" return self._lock.locked()", "CooldownSystem( 45, 1 ) # global rate-limit is 50 per", "it's ratelimit ({lock.limit})! 
Reset in {lock.delta} seconds\" ) await lock.defer_unlock()", "( f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def __del__(self): if", "= [dict_filter_missing(x) if isinstance(x, dict) else x for x in", "route to fetch the ratelimit bucket for Returns: The BucketLock", "route=route) elif response.status == 404: raise NotFound(response, response_data=result, route=route) elif", "parameters: token: the token to use returns: The currently logged", "-> None: if self.unlock_on_exit and self._lock.locked(): self.unlock() self.unlock_on_exit = True", "self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async def request( self, route: Route, data:", "= data lock = self.get_ratelimit(route) # this gets a BucketLock", "e.status == 401: raise LoginError(\"An improper token was passed\") from", ") await lock.blind_defer_unlock() # lock this route, but continue processing", "if attempt < self._max_attempts - 1 and e.errno in (54,", "so long as these are infrequent we're doing well log.warning(", "await self.request(Route(\"GET\", \"/gateway\")) except HTTPException as exc: raise GatewayNotFound from", "a token log.error( f\"Bot has exceeded global ratelimit, locking REST", "but we can attempt to avoid them # so long", "401: raise LoginError(\"An improper token was passed\") from e raise", "None: \"\"\"Unlocks the BucketLock but doesn't wait for completion.\"\"\" self.unlock_on_exit", "GlobalLock() self._max_attempts: int = 3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock] = WeakValueDictionary()", "-> BucketLock: \"\"\" Get a route's rate limit bucket. 
Args:", "new lock return BucketLock() def ingest_ratelimit(self, route: Route, header: CIMultiDictProxy,", "attempt * 2) continue raise async def _raise_exception(self, response, route,", "self.loop = asyncio.get_event_loop() if loop is None else loop self.__session:", "data for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async", "< self._max_attempts - 1 and e.errno in (54, 10054): await", "= _uriquote(reason, safe=\"/ \") if isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"] =", "response: if response.status == 200: return await response.read() await self._raise_exception(response,", "async def __aexit__(self, *args) -> None: if self.unlock_on_exit and self._lock.locked():", "UserRequests, WebhookRequests, ScheduledEventsRequests, ) from dis_snek.client.const import __py_version__, __repo_url__, __version__,", "BucketLock] = WeakValueDictionary() self._endpoints = {} self.user_agent: str = (", "None: \"\"\"Close the session.\"\"\" if self.__session: await self.__session.close() async def", "completion.\"\"\" self.unlock_on_exit = False loop = asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async", "that's pretty bad, this would usually happen if the user", "-> str: \"\"\"Get the gateway url.\"\"\" try: data: dict =", "Ingests a discord rate limit header to configure this bucket", "-> None: \"\"\"Close the session.\"\"\" if self.__session: await self.__session.close() async", "dis_snek.client.const import __py_version__, __repo_url__, __version__, logger_name, MISSING, Absent from dis_snek.client.errors", "from dis_snek.client.utils.input_utils import response_decode from dis_snek.client.utils.serializer import dict_filter_missing from dis_snek.models", "try: await self.global_lock.rate_limit() # prevent us exceeding the global rate", "get a global, that's pretty bad, this would usually happen", "response_decode(response) 
self.ingest_ratelimit(route, response.headers, lock) if response.status == 429: # ratelimit", "the global rate limit by throttling http requests if self.__session.closed:", "is None else loop self.__session: Absent[Optional[ClientSession]] = MISSING self.token: Optional[str]", "_raise_exception(self, response, route, result): log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status == 403:", "__py_version__, __repo_url__, __version__, logger_name, MISSING, Absent from dis_snek.client.errors import DiscordError,", "FormData]] = MISSING, reason: Absent[str] = MISSING, **kwargs: Dict[str, Any],", "= {} self.user_agent: str = ( f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__})", "str) -> dict: \"\"\" \"Login\" to the gateway, basically validates", "BucketLock: \"\"\"Manages the ratelimit for each bucket\"\"\" def __init__(self) ->", "self.unlock_on_exit = True class HTTPClient( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests,", "try: return await self.request(Route(\"GET\", \"/users/@me\")) except HTTPException as e: if", "await self.request(Route(\"GET\", \"/users/@me\")) except HTTPException as e: if e.status ==", "aiohttp/{aiohttp.__version__}\" ) def __del__(self): if self.__session and not self.__session.closed: self.loop.run_until_complete(self.__session.close())", "header in question bucket_lock: The rate limit bucket for this", "except OSError as e: if attempt < self._max_attempts - 1", "a new lock return BucketLock() def ingest_ratelimit(self, route: Route, header:", "for this route \"\"\" if bucket_hash := self._endpoints.get(route.rl_bucket): # we", "f\"<BucketLock: {self.bucket_hash or 'Generic'}>\" @property def locked(self) -> bool: \"\"\"Return", "isinstance(data, dict): kwargs[\"json\"] = dict_filter_missing(data) elif isinstance(data, FormData): kwargs[\"data\"] =", "dict_filter_missing(data) elif isinstance(data, FormData): kwargs[\"data\"] = data lock = 
self.get_ratelimit(route)", "f\"Bot has exceeded global ratelimit, locking REST API for {result.get('retry_after')}", "bucket hash has been set (ignores unlimited endpoints) log.debug(f\"Caching ingested", "if response.status == 403: raise Forbidden(response, response_data=result, route=route) elif response.status", "dict: \"\"\" \"Login\" to the gateway, basically validates the token", "rate-limit is 50 per second, conservatively we use 45 self._lock:", "clients sharing a token log.error( f\"Bot has exceeded global ratelimit,", "if loop is None else loop self.__session: Absent[Optional[ClientSession]] = MISSING", "be in the cache # return that lock return lock", "a ratelimit header from discord to determine ratelimit. Args: route:", "http requests if self.__session.closed: await self.login(self.token) async with self.__session.request(route.method, route.url,", "({lock.limit})! Reset in {lock.delta} seconds\" ) await lock.defer_unlock() # lock", "def login(self, token: str) -> dict: \"\"\" \"Login\" to the", "MISSING, Absent from dis_snek.client.errors import DiscordError, Forbidden, GatewayNotFound, HTTPException, NotFound,", "-> None: \"\"\"Unlock this bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self, header: CIMultiDictProxy)", "__version__, logger_name, MISSING, Absent from dis_snek.client.errors import DiscordError, Forbidden, GatewayNotFound,", "asyncio.get_event_loop() if loop is None else loop self.__session: Absent[Optional[ClientSession]] =", "Args: delta: The time to keep the lock acquired \"\"\"", "from dis_snek.client.errors import DiscordError, Forbidden, GatewayNotFound, HTTPException, NotFound, LoginError from", "= None, loop: Optional[asyncio.AbstractEventLoop] = None): self.connector: Optional[BaseConnector] = connector", "= MISSING self.token: Optional[str] = None self.global_lock: GlobalLock = GlobalLock()", "\"\"\" Make a request to discord. 
parameters: route: The route", "attempt < self._max_attempts - 1 and e.errno in (54, 10054):", "route, it'll still be in the cache # return that", "seen this route before, we know which bucket it is", "exc: raise GatewayNotFound from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def", "that lock return lock # if no cached lock exists,", "WebhookRequests, ScheduledEventsRequests, ) from dis_snek.client.const import __py_version__, __repo_url__, __version__, logger_name,", "header from discord to determine ratelimit. Args: route: The route", "the ratelimit bucket for Returns: The BucketLock object for this", "issues, retry log.warning( f\"{route.endpoint} Received {response.status}... retrying in {1 +", "raise GatewayNotFound from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def websocket_connect(self,", "self._lock.acquire() async def __aexit__(self, *args) -> None: if self.unlock_on_exit and", "bytes: log.debug(f\"{asset} requests {url} from CDN\") async with self.__session.get(url) as", "\"\"\"This file handles the interaction with discords http endpoints.\"\"\" import", "ratelimit\"\"\" def __init__(self) -> None: self.cooldown_system: CooldownSystem = CooldownSystem( 45,", "{self.bucket_hash or 'Generic'}>\" @property def locked(self) -> bool: \"\"\"Return True", "the BucketLock but doesn't wait for completion.\"\"\" self.unlock_on_exit = False", "ratelimit, locking REST API for {result.get('retry_after')} seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\")))", "Optional[BaseConnector] = None, loop: Optional[asyncio.AbstractEventLoop] = None): self.connector: Optional[BaseConnector] =", "Received {response.status} :: [{lock.remaining}/{lock.limit} calls remaining]\" ) return result except", "the gateway url.\"\"\" try: data: dict = await self.request(Route(\"GET\", \"/gateway\"))", "response.status == 403: raise Forbidden(response, 
response_data=result, route=route) elif response.status ==", "import dict_filter_missing from dis_snek.models import CooldownSystem from .route import Route", "WeakValueDictionary[str, BucketLock] = WeakValueDictionary() self._endpoints = {} self.user_agent: str =", "async def get_gateway(self) -> str: \"\"\"Get the gateway url.\"\"\" try:", "self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route: Route) -> BucketLock: \"\"\" Get a", "lock # if no cached lock exists, return a new", "self._max_attempts: int = 3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock] = WeakValueDictionary() self._endpoints", "kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\" if reason not in (None, MISSING):", "json payload to send in the request reason: Attach a", "\"\"\"Unlocks the BucketLock but doesn't wait for completion.\"\"\" self.unlock_on_exit =", "not self.__session.closed: self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route: Route) -> BucketLock: \"\"\"", "If this endpoint has been used before, it will get", "bool = True self.bucket_hash: Optional[str] = None self.limit: int =", "API for {result.get('retry_after')} seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: #", "discords http endpoints.\"\"\" import asyncio import logging from typing import", "calls remaining]\" ) return result except OSError as e: if", "BucketLock but doesn't wait for completion.\"\"\" self.unlock_on_exit = False loop", "response_data=result, route=route) async def request_cdn(self, url, asset) -> bytes: log.debug(f\"{asset}", "dis_snek.models import CooldownSystem from .route import Route __all__ = [\"HTTPClient\"]", "route=route) async def request_cdn(self, url, asset) -> bytes: log.debug(f\"{asset} requests", "the global lock for a given duration. Args: delta: The", "data. 
parameters: token: the token to use returns: The currently", "StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ) from dis_snek.client.const import __py_version__,", "def rate_limit(self) -> None: async with self._lock: while not self.cooldown_system.acquire_token():", "Lock the global lock for a given duration. Args: delta:", "from .route import Route __all__ = [\"HTTPClient\"] log = logging.getLogger(logger_name)", "limit header in question bucket_lock: The rate limit bucket for", "discord to determine ratelimit. Args: route: The route we're ingesting", "import ( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests,", "Any], ) -> Any: \"\"\" Make a request to discord.", "# lock this route, but continue processing the current response", "__init__(self, connector: Optional[BaseConnector] = None, loop: Optional[asyncio.AbstractEventLoop] = None): self.connector:", "self.__session.closed: await self.login(self.token) async with self.__session.request(route.method, route.url, **kwargs) as response:", "async def lock(self, delta: float) -> None: \"\"\" Lock the", "parameters: route: The route to take json: A json payload", "False): # if we get a global, that's pretty bad,", "log.error(f\"{route.method}::{route.url}: {response.status}\") if response.status == 403: raise Forbidden(response, response_data=result, route=route)", "cache # return that lock return lock # if no", "this route, it'll still be in the cache # return", "from 2 clients sharing a token log.error( f\"Bot has exceeded", "on this route, it'll still be in the cache #", "endpoints) log.debug(f\"Caching ingested rate limit data for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] =", "2 clients sharing a token log.error( f\"Bot has exceeded global", "2} seconds\" ) await asyncio.sleep(1 + attempt * 2) continue", "response.status == 200: return await response.read() await 
self._raise_exception(response, asset, await", "raise LoginError(\"An improper token was passed\") from e raise async", "unfortunately unavoidable, but we can attempt to avoid them #", "lock return lock # if no cached lock exists, return", "this route, but continue processing the current response elif response.status", "as e: if e.status == 401: raise LoginError(\"An improper token", "class BucketLock: \"\"\"Manages the ratelimit for each bucket\"\"\" def __init__(self)", "in the bucket, lock until reset log.debug( f\"{route.endpoint} Has exhausted", "[{lock.remaining}/{lock.limit} calls remaining]\" ) return result except OSError as e:", "loop.call_later(self.delta, self.unlock) async def defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock", "lock this route, but continue processing the current response elif", "respective buckethash # otherwise a brand-new bucket lock will be", "usually happen if the user is hitting the api from", "ingesting ratelimit for header: The rate limit header in question", "for each bucket\"\"\" def __init__(self) -> None: self._lock: asyncio.Lock =", "lock for a given duration. Args: delta: The time to", "route \"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: # We only ever try", "route.url, **kwargs) as response: result = await response_decode(response) self.ingest_ratelimit(route, response.headers,", "ratelimit header from discord to determine ratelimit. 
Args: route: The", "HTTPException(response, response_data=result, route=route) async def request_cdn(self, url, asset) -> bytes:", "= float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def blind_defer_unlock(self) -> None: \"\"\"Unlocks the", "def blind_defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock but doesn't wait", "CIMultiDictProxy, bucket_lock: BucketLock) -> None: \"\"\" Ingests a ratelimit header", "Route, data: Absent[Union[dict, FormData]] = MISSING, reason: Absent[str] = MISSING,", "avoid them # so long as these are infrequent we're", "self.remaining = int(header.get(\"x-ratelimit-remaining\") or -1) self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0)) async", "self.delta: float = 0.0 def __repr__(self) -> str: return f\"<BucketLock:", "= connector self.loop = asyncio.get_event_loop() if loop is None else", "route to take json: A json payload to send in", "duration. Args: delta: The time to keep the lock acquired", "file handles the interaction with discords http endpoints.\"\"\" import asyncio", "{response.status}... retrying in {1 + attempt * 2} seconds\" )", "* 2) continue if not 300 > response.status >= 200:", "import response_decode from dis_snek.client.utils.serializer import dict_filter_missing from dis_snek.models import CooldownSystem", "The route to fetch the ratelimit bucket for Returns: The", "None, loop: Optional[asyncio.AbstractEventLoop] = None): self.connector: Optional[BaseConnector] = connector self.loop", "self.user_agent: str = ( f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" )", "a global, that's pretty bad, this would usually happen if", "bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: # We only ever try and cache", "its ratelimit ({lock.limit})! 
Locking route for {lock.delta} seconds\" ) await", "route=route) elif response.status >= 500: raise DiscordError(response, response_data=result, route=route) else:", "multidict import CIMultiDictProxy from dis_snek.api.http.http_requests import ( BotRequests, ChannelRequests, EmojiRequests,", "OSError as e: if attempt < self._max_attempts - 1 and", "by throttling http requests if self.__session.closed: await self.login(self.token) async with", "Has exhausted its ratelimit ({lock.limit})! Locking route for {lock.delta} seconds\"", "attempt * 2} seconds\" ) await asyncio.sleep(1 + attempt *", "def __del__(self): if self.__session and not self.__session.closed: self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self,", "global rate limit by throttling http requests if self.__session.closed: await", "GlobalLock: \"\"\"Manages the global ratelimit\"\"\" def __init__(self) -> None: self.cooldown_system:", "Any, Dict, Optional, Union from urllib.parse import quote as _uriquote", "None: if self.unlock_on_exit and self._lock.locked(): self.unlock() self.unlock_on_exit = True class", "{response.status} :: [{lock.remaining}/{lock.limit} calls remaining]\" ) return result except OSError", "log.debug(f\"{asset} requests {url} from CDN\") async with self.__session.get(url) as response:", "or -1) self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def blind_defer_unlock(self) ->", "attempt in range(self._max_attempts): async with lock: try: await self.global_lock.rate_limit() #", "[dict_filter_missing(x) if isinstance(x, dict) else x for x in data]", "from typing import Any, Dict, Optional, Union from urllib.parse import", "response.status == 404: raise NotFound(response, response_data=result, route=route) elif response.status >=", "\"\"\" self.__session = ClientSession(connector=self.connector) self.token = token try: return await", "attempt to avoid them # so long as these are", "None else loop self.__session: 
Absent[Optional[ClientSession]] = MISSING self.token: Optional[str] =", "ingested rate limit data for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash]", "= await self.request(Route(\"GET\", \"/gateway\")) except HTTPException as exc: raise GatewayNotFound", "bool: \"\"\"Return True if lock is acquired.\"\"\" return self._lock.locked() def", "safe=\"/ \") if isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" #", "\"application/json\" # sanity check payload if isinstance(data, list): kwargs[\"json\"] =", "loop: Optional[asyncio.AbstractEventLoop] = None): self.connector: Optional[BaseConnector] = connector self.loop =", "lock is acquired.\"\"\" return self._lock.locked() def unlock(self) -> None: \"\"\"Unlock", "header: CIMultiDictProxy) -> None: \"\"\" Ingests a discord rate limit", ":: [{lock.remaining}/{lock.limit} calls remaining]\" ) return result except OSError as", "None: \"\"\" Ingests a discord rate limit header to configure", "for completion.\"\"\" self.unlock_on_exit = False loop = asyncio.get_running_loop() loop.call_later(self.delta, self.unlock)", "ratelimit for each bucket\"\"\" def __init__(self) -> None: self._lock: asyncio.Lock", "elif isinstance(data, dict): kwargs[\"json\"] = dict_filter_missing(data) elif isinstance(data, FormData): kwargs[\"data\"]", "ClientWebSocketResponse, FormData from multidict import CIMultiDictProxy from dis_snek.api.http.http_requests import (", "e: if attempt < self._max_attempts - 1 and e.errno in", "exceeded global ratelimit, locking REST API for {result.get('retry_after')} seconds\" )", "to determine ratelimit. 
Args: route: The route we're ingesting ratelimit", "-1) self.remaining = int(header.get(\"x-ratelimit-remaining\") or -1) self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0))", "no cached lock exists, return a new lock return BucketLock()", "self._max_attempts - 1 and e.errno in (54, 10054): await asyncio.sleep(1", "in (54, 10054): await asyncio.sleep(1 + attempt * 2) continue", "the bucket, lock until reset log.debug( f\"{route.endpoint} Has exhausted its", "# lock this route and wait for unlock continue elif", "route: Route, data: Absent[Union[dict, FormData]] = MISSING, reason: Absent[str] =", "CDN\") async with self.__session.get(url) as response: if response.status == 200:", "return self._lock.locked() def unlock(self) -> None: \"\"\"Unlock this bucket.\"\"\" self._lock.release()", "the session.\"\"\" if self.__session: await self.__session.close() async def get_gateway(self) ->", "500: raise DiscordError(response, response_data=result, route=route) else: raise HTTPException(response, response_data=result, route=route)", "for this route. 
# If this endpoint has been used", "if lock is acquired.\"\"\" return self._lock.locked() def unlock(self) -> None:", "Route, header: CIMultiDictProxy, bucket_lock: BucketLock) -> None: \"\"\" Ingests a", "has been used before, it will get an existing ratelimit", "NotFound(response, response_data=result, route=route) elif response.status >= 500: raise DiscordError(response, response_data=result,", "global ratelimit\"\"\" def __init__(self) -> None: self.cooldown_system: CooldownSystem = CooldownSystem(", "if e.status == 401: raise LoginError(\"An improper token was passed\")", "from e raise async def close(self) -> None: \"\"\"Close the", "url.\"\"\" try: data: dict = await self.request(Route(\"GET\", \"/gateway\")) except HTTPException", "the Discord API.\"\"\" def __init__(self, connector: Optional[BaseConnector] = None, loop:", "dis_snek.client.utils.serializer import dict_filter_missing from dis_snek.models import CooldownSystem from .route import", "this bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self, header: CIMultiDictProxy) -> None: \"\"\"", "ratelimit ({lock.limit})! Locking route for {lock.delta} seconds\" ) await lock.blind_defer_unlock()", "response.status >= 200: await self._raise_exception(response, route, result) log.debug( f\"{route.endpoint} Received", "__init__(self) -> None: self._lock: asyncio.Lock = asyncio.Lock() self.unlock_on_exit: bool =", "lock exists, return a new lock return BucketLock() def ingest_ratelimit(self,", "if not 300 > response.status >= 200: await self._raise_exception(response, route,", "if response.status == 200: return await response.read() await self._raise_exception(response, asset,", "if isinstance(data, list): kwargs[\"json\"] = [dict_filter_missing(x) if isinstance(x, dict) else", "retry log.warning( f\"{route.endpoint} Received {response.status}... 
retrying in {1 + attempt", "if response.status == 429: # ratelimit exceeded if result.get(\"global\", False):", "MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ): \"\"\"A http", "10054): await asyncio.sleep(1 + attempt * 2) continue raise async", "class HTTPClient( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests,", "def unlock(self) -> None: \"\"\"Unlock this bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self,", "if bucket_hash := self._endpoints.get(route.rl_bucket): # we have seen this route", "str = ( f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def", "cache the bucket if the bucket hash has been set", "rate limit data for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] =", "ScheduledEventsRequests, ): \"\"\"A http client for sending requests to the", "request( self, route: Route, data: Absent[Union[dict, FormData]] = MISSING, reason:", "otherwise a brand-new bucket lock will be returned for attempt", "429: # ratelimit exceeded if result.get(\"global\", False): # if we", "FormData): kwargs[\"data\"] = data lock = self.get_ratelimit(route) # this gets", "sending requests to the Discord API.\"\"\" def __init__(self, connector: Optional[BaseConnector]", "login(self, token: str) -> dict: \"\"\" \"Login\" to the gateway,", "-> None: \"\"\" Ingests a ratelimit header from discord to", "asset) -> bytes: log.debug(f\"{asset} requests {url} from CDN\") async with", "# prevent us exceeding the global rate limit by throttling", "def defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock after a specified", "self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: # 429's are unfortunately unavoidable, but we", "used for audit logs \"\"\" # 
Assemble headers kwargs[\"headers\"] =", "response.headers, lock) if response.status == 429: # ratelimit exceeded if", "__repo_url__, __version__, logger_name, MISSING, Absent from dis_snek.client.errors import DiscordError, Forbidden,", "connect to \"\"\" return await self.__session.ws_connect( url, timeout=30, max_msg_size=0, autoclose=False,", "def __aexit__(self, *args) -> None: if self.unlock_on_exit and self._lock.locked(): self.unlock()", "this route and wait for unlock continue elif lock.remaining ==", "\"\"\" Get a route's rate limit bucket. Args: route: The", "delta: The time to keep the lock acquired \"\"\" await", "as response: if response.status == 200: return await response.read() await", "self.__session and not self.__session.closed: self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route: Route) ->", "connector: Optional[BaseConnector] = None, loop: Optional[asyncio.AbstractEventLoop] = None): self.connector: Optional[BaseConnector]", "async def request_cdn(self, url, asset) -> bytes: log.debug(f\"{asset} requests {url}", "def websocket_connect(self, url: str) -> ClientWebSocketResponse: \"\"\" Connect to the", "self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self, delta: float) -> None:", "(None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/ \") if isinstance(data, (list,", "to fetch the ratelimit bucket for Returns: The BucketLock object", "determine ratelimit. 
Args: route: The route we're ingesting ratelimit for", "second, conservatively we use 45 self._lock: asyncio.Lock = asyncio.Lock() async", "MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ) from dis_snek.client.const", "request reason: Attach a reason to this request, used for", "route=route) else: raise HTTPException(response, response_data=result, route=route) async def request_cdn(self, url,", "HTTPException as exc: raise GatewayNotFound from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\")", "import WeakValueDictionary import aiohttp from aiohttp import BaseConnector, ClientSession, ClientWebSocketResponse,", "\"json\") async def websocket_connect(self, url: str) -> ClientWebSocketResponse: \"\"\" Connect", "async def close(self) -> None: \"\"\"Close the session.\"\"\" if self.__session:", "dict = await self.request(Route(\"GET\", \"/gateway\")) except HTTPException as exc: raise", "we get a global, that's pretty bad, this would usually", "200: await self._raise_exception(response, route, result) log.debug( f\"{route.endpoint} Received {response.status} ::", "import DiscordError, Forbidden, GatewayNotFound, HTTPException, NotFound, LoginError from dis_snek.client.utils.input_utils import", "CIMultiDictProxy) -> None: \"\"\" Ingests a discord rate limit header", "The BucketLock object for this route \"\"\" if bucket_hash :=", "in {lock.delta} seconds\" ) await lock.defer_unlock() # lock this route", "ingest_ratelimit_header(self, header: CIMultiDictProxy) -> None: \"\"\" Ingests a discord rate", "have an active lock on this route, it'll still be", "requests if self.__session.closed: await self.login(self.token) async with self.__session.request(route.method, route.url, **kwargs)", "if bucket_lock.bucket_hash: # We only ever try and cache the", "exceeded it's ratelimit ({lock.limit})! 
Reset in {lock.delta} seconds\" ) await", "token was passed\") from e raise async def close(self) ->", "\"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def websocket_connect(self, url: str) -> ClientWebSocketResponse: \"\"\"", "global lock for a given duration. Args: delta: The time", "for unlock continue elif lock.remaining == 0: # Last call", "endpoints.\"\"\" import asyncio import logging from typing import Any, Dict,", "+ attempt * 2} seconds\" ) await asyncio.sleep(1 + attempt", "The currently logged in bot's data \"\"\" self.__session = ClientSession(connector=self.connector)", "# If this endpoint has been used before, it will", "which bucket it is associated with lock = self.ratelimit_locks.get(bucket_hash) if", "gets a BucketLock for this route. # If this endpoint", "only ever try and cache the bucket if the bucket", "\"\"\" return await self.__session.ws_connect( url, timeout=30, max_msg_size=0, autoclose=False, headers={\"User-Agent\": self.user_agent},", "result.get(\"global\", False): # if we get a global, that's pretty", "self.__session: Absent[Optional[ClientSession]] = MISSING self.token: Optional[str] = None self.global_lock: GlobalLock", "{self.token}\" if reason not in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason,", "self.get_ratelimit(route) # this gets a BucketLock for this route. #", "GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests,", "it will get an existing ratelimit for the respective buckethash", "to avoid them # so long as these are infrequent", "ClientWebSocketResponse: \"\"\" Connect to the websocket. 
parameters: url: the url", "MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ) from", "for header: The rate limit header in question bucket_lock: The", "{bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async def request(", "to the Discord API.\"\"\" def __init__(self, connector: Optional[BaseConnector] = None,", "from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def websocket_connect(self, url: str)", "f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def __del__(self): if self.__session", "{lock.delta} seconds\" ) await lock.defer_unlock() # lock this route and", "self.unlock_on_exit: bool = True self.bucket_hash: Optional[str] = None self.limit: int", "= MISSING, reason: Absent[str] = MISSING, **kwargs: Dict[str, Any], )", "or -1) self.remaining = int(header.get(\"x-ratelimit-remaining\") or -1) self.delta = float(header.get(\"x-ratelimit-reset-after\",", "urllib.parse import quote as _uriquote from weakref import WeakValueDictionary import", ".route import Route __all__ = [\"HTTPClient\"] log = logging.getLogger(logger_name) class", "-> str: return f\"<BucketLock: {self.bucket_hash or 'Generic'}>\" @property def locked(self)", "rate limit bucket for this route \"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash:", "except HTTPException as e: if e.status == 401: raise LoginError(\"An", "we know which bucket it is associated with lock =", "seconds\" ) await lock.defer_unlock() # lock this route and wait", "as response: result = await response_decode(response) self.ingest_ratelimit(route, response.headers, lock) if", "to configure this bucket lock. 
Args: header: A header from", "throttling http requests if self.__session.closed: await self.login(self.token) async with self.__session.request(route.method,", ") # global rate-limit is 50 per second, conservatively we", "import Any, Dict, Optional, Union from urllib.parse import quote as", "def ingest_ratelimit(self, route: Route, header: CIMultiDictProxy, bucket_lock: BucketLock) -> None:", "set (ignores unlimited endpoints) log.debug(f\"Caching ingested rate limit data for:", "async with lock: try: await self.global_lock.rate_limit() # prevent us exceeding", "raise Forbidden(response, response_data=result, route=route) elif response.status == 404: raise NotFound(response,", "LoginError from dis_snek.client.utils.input_utils import response_decode from dis_snek.client.utils.serializer import dict_filter_missing from", "pretty bad, this would usually happen if the user is", "header to configure this bucket lock. Args: header: A header", "API.\"\"\" def __init__(self, connector: Optional[BaseConnector] = None, loop: Optional[asyncio.AbstractEventLoop] =", "the url to connect to \"\"\" return await self.__session.ws_connect( url,", "# sanity check payload if isinstance(data, list): kwargs[\"json\"] = [dict_filter_missing(x)", "this route. 
# If this endpoint has been used before,", "await response_decode(response) self.ingest_ratelimit(route, response.headers, lock) if response.status == 429: #", "the global ratelimit\"\"\" def __init__(self) -> None: self.cooldown_system: CooldownSystem =", "conservatively we use 45 self._lock: asyncio.Lock = asyncio.Lock() async def", "response.status == 429: # ratelimit exceeded if result.get(\"global\", False): #", "sharing a token log.error( f\"Bot has exceeded global ratelimit, locking", "asset, await response_decode(response)) async def login(self, token: str) -> dict:", "asyncio.sleep(self.delta) self.unlock() async def __aenter__(self) -> None: await self._lock.acquire() async", "\"\"\" \"Login\" to the gateway, basically validates the token and", "e.errno in (54, 10054): await asyncio.sleep(1 + attempt * 2)", "GatewayNotFound, HTTPException, NotFound, LoginError from dis_snek.client.utils.input_utils import response_decode from dis_snek.client.utils.serializer", "\"\"\" # Assemble headers kwargs[\"headers\"] = {\"User-Agent\": self.user_agent} if self.token:", "close(self) -> None: \"\"\"Close the session.\"\"\" if self.__session: await self.__session.close()", "each bucket\"\"\" def __init__(self) -> None: self._lock: asyncio.Lock = asyncio.Lock()", "the gateway, basically validates the token and grabs user data.", "bucket_lock: BucketLock) -> None: \"\"\" Ingests a ratelimit header from", "__aexit__(self, *args) -> None: if self.unlock_on_exit and self._lock.locked(): self.unlock() self.unlock_on_exit", "StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ): \"\"\"A http client for", "== 429: # ratelimit exceeded if result.get(\"global\", False): # if", "remaining]\" ) return result except OSError as e: if attempt", "passed\") from e raise async def close(self) -> None: \"\"\"Close", "log.debug(f\"Caching ingested rate limit data for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = 
bucket_lock.bucket_hash", "await asyncio.sleep(self.delta) self.unlock() async def __aenter__(self) -> None: await self._lock.acquire()", "self.remaining: int = -1 self.delta: float = 0.0 def __repr__(self)", "async with self.__session.get(url) as response: if response.status == 200: return", "acquired.\"\"\" return self._lock.locked() def unlock(self) -> None: \"\"\"Unlock this bucket.\"\"\"", "bucket for this route \"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: # We", "= MISSING, **kwargs: Dict[str, Any], ) -> Any: \"\"\" Make", "returned for attempt in range(self._max_attempts): async with lock: try: await", "= asyncio.get_event_loop() if loop is None else loop self.__session: Absent[Optional[ClientSession]]", "if the bucket hash has been set (ignores unlimited endpoints)", "self.unlock_on_exit = False await asyncio.sleep(self.delta) self.unlock() async def __aenter__(self) ->", "in {500, 502, 504}: # Server issues, retry log.warning( f\"{route.endpoint}", "requests to the Discord API.\"\"\" def __init__(self, connector: Optional[BaseConnector] =", "kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" # sanity check payload if isinstance(data, list):", "int = -1 self.remaining: int = -1 self.delta: float =", "ever try and cache the bucket if the bucket hash", "int(header.get(\"x-ratelimit-limit\") or -1) self.remaining = int(header.get(\"x-ratelimit-remaining\") or -1) self.delta =", ") await lock.defer_unlock() # lock this route and wait for", "with self.__session.get(url) as response: if response.status == 200: return await", "ReactionRequests, StickerRequests, ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ): \"\"\"A http client", "self.unlock_on_exit and self._lock.locked(): self.unlock() self.unlock_on_exit = True class HTTPClient( BotRequests,", "the cache # return that lock return lock # if", "\"Login\" to the gateway, basically validates the token and grabs", "sanity check payload if 
isinstance(data, list): kwargs[\"json\"] = [dict_filter_missing(x) if", "time to keep the lock acquired \"\"\" await self._lock.acquire() await", "in question bucket_lock: The rate limit bucket for this route", "ratelimit bucket for Returns: The BucketLock object for this route", "asyncio.sleep(1 + attempt * 2) continue raise async def _raise_exception(self,", "hash has been set (ignores unlimited endpoints) log.debug(f\"Caching ingested rate", "url: str) -> ClientWebSocketResponse: \"\"\" Connect to the websocket. parameters:", "): \"\"\"A http client for sending requests to the Discord", "def ingest_ratelimit_header(self, header: CIMultiDictProxy) -> None: \"\"\" Ingests a discord", "dis_snek.client.errors import DiscordError, Forbidden, GatewayNotFound, HTTPException, NotFound, LoginError from dis_snek.client.utils.input_utils", "The rate limit bucket for this route \"\"\" bucket_lock.ingest_ratelimit_header(header) if", "for attempt in range(self._max_attempts): async with lock: try: await self.global_lock.rate_limit()", "from dis_snek.api.http.http_requests import ( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests,", "'Generic'}>\" @property def locked(self) -> bool: \"\"\"Return True if lock", "in range(self._max_attempts): async with lock: try: await self.global_lock.rate_limit() # prevent", "and cache the bucket if the bucket hash has been", "**kwargs: Dict[str, Any], ) -> Any: \"\"\" Make a request", "lock(self, delta: float) -> None: \"\"\" Lock the global lock", "(54, 10054): await asyncio.sleep(1 + attempt * 2) continue raise", "str: return f\"<BucketLock: {self.bucket_hash or 'Generic'}>\" @property def locked(self) ->", "We only ever try and cache the bucket if the", "get_ratelimit(self, route: Route) -> BucketLock: \"\"\" Get a route's rate", "\"/users/@me\")) except HTTPException as e: if e.status == 401: raise", "Connect to the websocket. 
parameters: url: the url to connect", "if no cached lock exists, return a new lock return", "A json payload to send in the request reason: Attach", "list): kwargs[\"json\"] = [dict_filter_missing(x) if isinstance(x, dict) else x for", "response_data=result, route=route) elif response.status == 404: raise NotFound(response, response_data=result, route=route)", "BucketLock) -> None: \"\"\" Ingests a ratelimit header from discord", "logging from typing import Any, Dict, Optional, Union from urllib.parse", "UserRequests, WebhookRequests, ScheduledEventsRequests, ): \"\"\"A http client for sending requests", "response.status in {500, 502, 504}: # Server issues, retry log.warning(", "self.token = token try: return await self.request(Route(\"GET\", \"/users/@me\")) except HTTPException", "Args: header: A header from a http response \"\"\" self.bucket_hash", "Absent[Optional[ClientSession]] = MISSING self.token: Optional[str] = None self.global_lock: GlobalLock =", "to keep the lock acquired \"\"\" await self._lock.acquire() await asyncio.sleep(delta)", "= GlobalLock() self._max_attempts: int = 3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock] =", "an existing ratelimit for the respective buckethash # otherwise a", "a http response \"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\")", "rate limit header in question bucket_lock: The rate limit bucket", "LoginError(\"An improper token was passed\") from e raise async def", ") await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: # 429's are unfortunately unavoidable,", "# 429's are unfortunately unavoidable, but we can attempt to", "header: CIMultiDictProxy, bucket_lock: BucketLock) -> None: \"\"\" Ingests a ratelimit", "# if we get a global, that's pretty bad, this", "= -1 self.remaining: int = -1 self.delta: float = 0.0", "as _uriquote from weakref import WeakValueDictionary import aiohttp from aiohttp", "def lock(self, delta: 
float) -> None: \"\"\" Lock the global", "lock.defer_unlock() # lock this route and wait for unlock continue", "\"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\") or -1) self.remaining", "def get_ratelimit(self, route: Route) -> BucketLock: \"\"\" Get a route's", "= False loop = asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async def defer_unlock(self)", "loop is None else loop self.__session: Absent[Optional[ClientSession]] = MISSING self.token:", "= int(header.get(\"x-ratelimit-remaining\") or -1) self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def", "else loop self.__session: Absent[Optional[ClientSession]] = MISSING self.token: Optional[str] = None", "return BucketLock() def ingest_ratelimit(self, route: Route, header: CIMultiDictProxy, bucket_lock: BucketLock)", "will be returned for attempt in range(self._max_attempts): async with lock:", "await lock.defer_unlock() # lock this route and wait for unlock", "result = await response_decode(response) self.ingest_ratelimit(route, response.headers, lock) if response.status ==", "the token to use returns: The currently logged in bot's", ":= self._endpoints.get(route.rl_bucket): # we have seen this route before, we", "continue elif lock.remaining == 0: # Last call available in", "await asyncio.sleep(1 + attempt * 2) continue if not 300", "if self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\" if reason not in", "loop self.__session: Absent[Optional[ClientSession]] = MISSING self.token: Optional[str] = None self.global_lock:", "route. # If this endpoint has been used before, it", "from discord to determine ratelimit. 
Args: route: The route we're", "endpoint has been used before, it will get an existing", "will get an existing ratelimit for the respective buckethash #", "us exceeding the global rate limit by throttling http requests", "seconds\" ) await lock.blind_defer_unlock() # lock this route, but continue", "The route to take json: A json payload to send", "MISSING, **kwargs: Dict[str, Any], ) -> Any: \"\"\" Make a", "await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self, delta: float) -> None: \"\"\"", "Args: route: The route to fetch the ratelimit bucket for", "limit bucket for this route \"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: #", "use returns: The currently logged in bot's data \"\"\" self.__session", "seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: # 429's are unfortunately", "the respective buckethash # otherwise a brand-new bucket lock will", "discord rate limit header to configure this bucket lock. Args:", "route we're ingesting ratelimit for header: The rate limit header", "parameters: url: the url to connect to \"\"\" return await", "({lock.limit})! Locking route for {lock.delta} seconds\" ) await lock.blind_defer_unlock() #", "502, 504}: # Server issues, retry log.warning( f\"{route.endpoint} Received {response.status}...", "configure this bucket lock. 
Args: header: A header from a", "isinstance(data, FormData): kwargs[\"data\"] = data lock = self.get_ratelimit(route) # this", "been used before, it will get an existing ratelimit for", "for sending requests to the Discord API.\"\"\" def __init__(self, connector:", "== 401: raise LoginError(\"An improper token was passed\") from e", "self._lock.release() class BucketLock: \"\"\"Manages the ratelimit for each bucket\"\"\" def", "try: data: dict = await self.request(Route(\"GET\", \"/gateway\")) except HTTPException as", "as exc: raise GatewayNotFound from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async", "The time to keep the lock acquired \"\"\" await self._lock.acquire()", "-> None: self._lock: asyncio.Lock = asyncio.Lock() self.unlock_on_exit: bool = True", "bucket lock. Args: header: A header from a http response", "blind_defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock but doesn't wait for", "Optional[BaseConnector] = connector self.loop = asyncio.get_event_loop() if loop is None", "def __aenter__(self) -> None: await self._lock.acquire() async def __aexit__(self, *args)", "rate_limit(self) -> None: async with self._lock: while not self.cooldown_system.acquire_token(): await", "lock return BucketLock() def ingest_ratelimit(self, route: Route, header: CIMultiDictProxy, bucket_lock:", "\"\"\" Ingests a ratelimit header from discord to determine ratelimit.", "processing the current response elif response.status in {500, 502, 504}:", "float) -> None: \"\"\" Lock the global lock for a", "else: raise HTTPException(response, response_data=result, route=route) async def request_cdn(self, url, asset)", "\"/gateway\")) except HTTPException as exc: raise GatewayNotFound from exc return", "bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self, header: CIMultiDictProxy) -> None: \"\"\" Ingests", "self._lock.locked(): self.unlock() self.unlock_on_exit = True class HTTPClient( BotRequests, ChannelRequests, 
EmojiRequests,", "is hitting the api from 2 clients sharing a token", "limit bucket. Args: route: The route to fetch the ratelimit", "await asyncio.sleep(delta) self._lock.release() class BucketLock: \"\"\"Manages the ratelimit for each", "in {1 + attempt * 2} seconds\" ) await asyncio.sleep(1", "= header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\") or -1) self.remaining = int(header.get(\"x-ratelimit-remaining\")", "bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async def request( self, route: Route,", "import logging from typing import Any, Dict, Optional, Union from", "lock) if response.status == 429: # ratelimit exceeded if result.get(\"global\",", "bucket\"\"\" def __init__(self) -> None: self._lock: asyncio.Lock = asyncio.Lock() self.unlock_on_exit:", "Optional[str] = None self.limit: int = -1 self.remaining: int =", "def __init__(self) -> None: self._lock: asyncio.Lock = asyncio.Lock() self.unlock_on_exit: bool", "def request_cdn(self, url, asset) -> bytes: log.debug(f\"{asset} requests {url} from", "async def websocket_connect(self, url: str) -> ClientWebSocketResponse: \"\"\" Connect to", "from CDN\") async with self.__session.get(url) as response: if response.status ==", "{response.status}\") if response.status == 403: raise Forbidden(response, response_data=result, route=route) elif", "bot's data \"\"\" self.__session = ClientSession(connector=self.connector) self.token = token try:", "a BucketLock for this route. 
# If this endpoint has", "in data] elif isinstance(data, dict): kwargs[\"json\"] = dict_filter_missing(data) elif isinstance(data,", "None: async with self._lock: while not self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async", "attempt * 2) continue if not 300 > response.status >=", "bucket_lock.bucket_hash: # We only ever try and cache the bucket", "= bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async def request( self, route:", "await response.read() await self._raise_exception(response, asset, await response_decode(response)) async def login(self,", "rate limit header to configure this bucket lock. Args: header:", "route, but continue processing the current response elif response.status in", "log.error( f\"Bot has exceeded global ratelimit, locking REST API for", "isinstance(data, (list, dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" # sanity check payload", "= None self.limit: int = -1 self.remaining: int = -1", "we're doing well log.warning( f\"{route.endpoint} Has exceeded it's ratelimit ({lock.limit})!", "token: the token to use returns: The currently logged in", "self._lock.acquire() await asyncio.sleep(delta) self._lock.release() class BucketLock: \"\"\"Manages the ratelimit for", "ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ) from dis_snek.client.const import __py_version__, __repo_url__,", "= ( f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def __del__(self):", "__del__(self): if self.__session and not self.__session.closed: self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route:", "* 2) continue raise async def _raise_exception(self, response, route, result):", "await self.login(self.token) async with self.__session.request(route.method, route.url, **kwargs) as response: result", "return f\"<BucketLock: {self.bucket_hash or 
'Generic'}>\" @property def locked(self) -> bool:", "Make a request to discord. parameters: route: The route to", "= self.get_ratelimit(route) # this gets a BucketLock for this route.", "ScheduledEventsRequests, ) from dis_snek.client.const import __py_version__, __repo_url__, __version__, logger_name, MISSING,", "is acquired.\"\"\" return self._lock.locked() def unlock(self) -> None: \"\"\"Unlock this", "api from 2 clients sharing a token log.error( f\"Bot has", "__all__ = [\"HTTPClient\"] log = logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages the", "float = 0.0 def __repr__(self) -> str: return f\"<BucketLock: {self.bucket_hash", "reason: Absent[str] = MISSING, **kwargs: Dict[str, Any], ) -> Any:", "import asyncio import logging from typing import Any, Dict, Optional,", "= f\"Bot {self.token}\" if reason not in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"]", "else: # 429's are unfortunately unavoidable, but we can attempt", "result except OSError as e: if attempt < self._max_attempts -", "check payload if isinstance(data, list): kwargs[\"json\"] = [dict_filter_missing(x) if isinstance(x,", "Optional[str] = None self.global_lock: GlobalLock = GlobalLock() self._max_attempts: int =", "interaction with discords http endpoints.\"\"\" import asyncio import logging from", "after a specified delay.\"\"\" self.unlock_on_exit = False await asyncio.sleep(self.delta) self.unlock()", "def close(self) -> None: \"\"\"Close the session.\"\"\" if self.__session: await", "specified delay.\"\"\" self.unlock_on_exit = False await asyncio.sleep(self.delta) self.unlock() async def", "for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock async def", "continue if not 300 > response.status >= 200: await self._raise_exception(response,", "unlimited endpoints) log.debug(f\"Caching ingested rate limit data for: {bucket_lock.bucket_hash}\") 
self._endpoints[route.rl_bucket]", "f\"{route.endpoint} Received {response.status} :: [{lock.remaining}/{lock.limit} calls remaining]\" ) return result", ") return result except OSError as e: if attempt <", "from weakref import WeakValueDictionary import aiohttp from aiohttp import BaseConnector,", "None: await self._lock.acquire() async def __aexit__(self, *args) -> None: if", "self.cooldown_system: CooldownSystem = CooldownSystem( 45, 1 ) # global rate-limit", "2) continue raise async def _raise_exception(self, response, route, result): log.error(f\"{route.method}::{route.url}:", "None: self._lock: asyncio.Lock = asyncio.Lock() self.unlock_on_exit: bool = True self.bucket_hash:", "token: str) -> dict: \"\"\" \"Login\" to the gateway, basically", "BucketLock() def ingest_ratelimit(self, route: Route, header: CIMultiDictProxy, bucket_lock: BucketLock) ->", "dis_snek.client.utils.input_utils import response_decode from dis_snek.client.utils.serializer import dict_filter_missing from dis_snek.models import", "log.warning( f\"{route.endpoint} Received {response.status}... 
retrying in {1 + attempt *", "The rate limit header in question bucket_lock: The rate limit", "await response_decode(response)) async def login(self, token: str) -> dict: \"\"\"", "infrequent we're doing well log.warning( f\"{route.endpoint} Has exceeded it's ratelimit", "= 3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock] = WeakValueDictionary() self._endpoints = {}", "REST API for {result.get('retry_after')} seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else:", "response_decode from dis_snek.client.utils.serializer import dict_filter_missing from dis_snek.models import CooldownSystem from", "HTTPClient( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests,", "= WeakValueDictionary() self._endpoints = {} self.user_agent: str = ( f\"DiscordBot", "if lock: # if we have an active lock on", "return await self.__session.ws_connect( url, timeout=30, max_msg_size=0, autoclose=False, headers={\"User-Agent\": self.user_agent}, compress=0", "header.get(\"x-ratelimit-bucket\") self.limit = int(header.get(\"x-ratelimit-limit\") or -1) self.remaining = int(header.get(\"x-ratelimit-remaining\") or", "audit logs \"\"\" # Assemble headers kwargs[\"headers\"] = {\"User-Agent\": self.user_agent}", "improper token was passed\") from e raise async def close(self)", "429's are unfortunately unavoidable, but we can attempt to avoid", "if the user is hitting the api from 2 clients", "== 0: # Last call available in the bucket, lock", "payload if isinstance(data, list): kwargs[\"json\"] = [dict_filter_missing(x) if isinstance(x, dict)", "url, asset) -> bytes: log.debug(f\"{asset} requests {url} from CDN\") async", "send in the request reason: Attach a reason to this", "False await asyncio.sleep(self.delta) self.unlock() async def __aenter__(self) -> None: await", "BucketLock object for this route \"\"\" if bucket_hash := 
self._endpoints.get(route.rl_bucket):", "a discord rate limit header to configure this bucket lock.", "HTTPException, NotFound, LoginError from dis_snek.client.utils.input_utils import response_decode from dis_snek.client.utils.serializer import", "log = logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages the global ratelimit\"\"\" def", "this route \"\"\" if bucket_hash := self._endpoints.get(route.rl_bucket): # we have", "self.ratelimit_locks: WeakValueDictionary[str, BucketLock] = WeakValueDictionary() self._endpoints = {} self.user_agent: str", "ratelimit exceeded if result.get(\"global\", False): # if we get a", "them # so long as these are infrequent we're doing", "seconds\" ) await asyncio.sleep(1 + attempt * 2) continue if", "websocket_connect(self, url: str) -> ClientWebSocketResponse: \"\"\" Connect to the websocket.", "to \"\"\" return await self.__session.ws_connect( url, timeout=30, max_msg_size=0, autoclose=False, headers={\"User-Agent\":", "raise HTTPException(response, response_data=result, route=route) async def request_cdn(self, url, asset) ->", "self.unlock() self.unlock_on_exit = True class HTTPClient( BotRequests, ChannelRequests, EmojiRequests, GuildRequests,", "f\"{route.endpoint} Received {response.status}... retrying in {1 + attempt * 2}", "self.unlock) async def defer_unlock(self) -> None: \"\"\"Unlocks the BucketLock after", "a given duration. 
Args: delta: The time to keep the", "MISSING, reason: Absent[str] = MISSING, **kwargs: Dict[str, Any], ) ->", "-> bytes: log.debug(f\"{asset} requests {url} from CDN\") async with self.__session.get(url)", "to take json: A json payload to send in the", "this route \"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: # We only ever", "token log.error( f\"Bot has exceeded global ratelimit, locking REST API", "continue else: # 429's are unfortunately unavoidable, but we can", "== 403: raise Forbidden(response, response_data=result, route=route) elif response.status == 404:", "with discords http endpoints.\"\"\" import asyncio import logging from typing", "in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/ \") if isinstance(data,", "and e.errno in (54, 10054): await asyncio.sleep(1 + attempt *", "url to connect to \"\"\" return await self.__session.ws_connect( url, timeout=30,", "http endpoints.\"\"\" import asyncio import logging from typing import Any,", "Returns: The BucketLock object for this route \"\"\" if bucket_hash", "from aiohttp import BaseConnector, ClientSession, ClientWebSocketResponse, FormData from multidict import", "header from a http response \"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit", "Locking route for {lock.delta} seconds\" ) await lock.blind_defer_unlock() # lock", "self.limit = int(header.get(\"x-ratelimit-limit\") or -1) self.remaining = int(header.get(\"x-ratelimit-remaining\") or -1)", "loop = asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async def defer_unlock(self) -> None:", "== 200: return await response.read() await self._raise_exception(response, asset, await response_decode(response))", "BucketLock for this route. 
# If this endpoint has been", "as e: if attempt < self._max_attempts - 1 and e.errno", "locked(self) -> bool: \"\"\"Return True if lock is acquired.\"\"\" return", "return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def websocket_connect(self, url: str) -> ClientWebSocketResponse:", "3 self.ratelimit_locks: WeakValueDictionary[str, BucketLock] = WeakValueDictionary() self._endpoints = {} self.user_agent:", "def __init__(self) -> None: self.cooldown_system: CooldownSystem = CooldownSystem( 45, 1", "-> None: \"\"\" Lock the global lock for a given", "# we have seen this route before, we know which", "if reason not in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] = _uriquote(reason, safe=\"/", "= False await asyncio.sleep(self.delta) self.unlock() async def __aenter__(self) -> None:", "Get a route's rate limit bucket. Args: route: The route", "for Returns: The BucketLock object for this route \"\"\" if", "object for this route \"\"\" if bucket_hash := self._endpoints.get(route.rl_bucket): #", "Server issues, retry log.warning( f\"{route.endpoint} Received {response.status}... retrying in {1", "asyncio.Lock() async def rate_limit(self) -> None: async with self._lock: while", "global rate-limit is 50 per second, conservatively we use 45", "we have seen this route before, we know which bucket", "from urllib.parse import quote as _uriquote from weakref import WeakValueDictionary", "exhausted its ratelimit ({lock.limit})! 
Locking route for {lock.delta} seconds\" )", "return result except OSError as e: if attempt < self._max_attempts", "- 1 and e.errno in (54, 10054): await asyncio.sleep(1 +", "403: raise Forbidden(response, response_data=result, route=route) elif response.status == 404: raise", "await self.__session.close() async def get_gateway(self) -> str: \"\"\"Get the gateway", "payload to send in the request reason: Attach a reason", "async with self.__session.request(route.method, route.url, **kwargs) as response: result = await", "self.user_agent} if self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\" if reason not", "ingest_ratelimit(self, route: Route, header: CIMultiDictProxy, bucket_lock: BucketLock) -> None: \"\"\"", "with lock = self.ratelimit_locks.get(bucket_hash) if lock: # if we have", "HTTPException as e: if e.status == 401: raise LoginError(\"An improper", "async def __aenter__(self) -> None: await self._lock.acquire() async def __aexit__(self,", "active lock on this route, it'll still be in the", "(list, dict)): kwargs[\"headers\"][\"Content-Type\"] = \"application/json\" # sanity check payload if", "BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests, ReactionRequests, StickerRequests, ThreadRequests,", "self.__session: await self.__session.close() async def get_gateway(self) -> str: \"\"\"Get the", "dict): kwargs[\"json\"] = dict_filter_missing(data) elif isinstance(data, FormData): kwargs[\"data\"] = data", "basically validates the token and grabs user data. parameters: token:", "route's rate limit bucket. 
Args: route: The route to fetch", "self.token: kwargs[\"headers\"][\"Authorization\"] = f\"Bot {self.token}\" if reason not in (None,", "{result.get('retry_after')} seconds\" ) await self.global_lock.lock(float(result.get(\"retry_after\"))) continue else: # 429's are", "bucket it is associated with lock = self.ratelimit_locks.get(bucket_hash) if lock:", "raise DiscordError(response, response_data=result, route=route) else: raise HTTPException(response, response_data=result, route=route) async", "-1) self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def blind_defer_unlock(self) -> None:", "f\"{route.endpoint} Has exceeded it's ratelimit ({lock.limit})! Reset in {lock.delta} seconds\"", "response.status >= 500: raise DiscordError(response, response_data=result, route=route) else: raise HTTPException(response,", "True class HTTPClient( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests, MemberRequests, MessageRequests,", "happen if the user is hitting the api from 2", "route for {lock.delta} seconds\" ) await lock.blind_defer_unlock() # lock this", "route: The route to fetch the ratelimit bucket for Returns:", "been set (ignores unlimited endpoints) log.debug(f\"Caching ingested rate limit data", "def locked(self) -> bool: \"\"\"Return True if lock is acquired.\"\"\"", "http client for sending requests to the Discord API.\"\"\" def", "\"\"\"Unlocks the BucketLock after a specified delay.\"\"\" self.unlock_on_exit = False", "from a http response \"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\") self.limit =", "self._raise_exception(response, asset, await response_decode(response)) async def login(self, token: str) ->", "# return that lock return lock # if no cached", "kwargs[\"data\"] = data lock = self.get_ratelimit(route) # this gets a", "limit data for: {bucket_lock.bucket_hash}\") self._endpoints[route.rl_bucket] = bucket_lock.bucket_hash self.ratelimit_locks[bucket_lock.bucket_hash] = bucket_lock", 
"GatewayNotFound from exc return \"{0}?encoding={1}&v=9&compress=zlib-stream\".format(data[\"url\"], \"json\") async def websocket_connect(self, url:", "self._endpoints = {} self.user_agent: str = ( f\"DiscordBot ({__repo_url__} {__version__}", "str: \"\"\"Get the gateway url.\"\"\" try: data: dict = await", "Discord API.\"\"\" def __init__(self, connector: Optional[BaseConnector] = None, loop: Optional[asyncio.AbstractEventLoop]", "{} self.user_agent: str = ( f\"DiscordBot ({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\"", "prevent us exceeding the global rate limit by throttling http", "import CIMultiDictProxy from dis_snek.api.http.http_requests import ( BotRequests, ChannelRequests, EmojiRequests, GuildRequests,", "None: self.cooldown_system: CooldownSystem = CooldownSystem( 45, 1 ) # global", "kwargs[\"json\"] = [dict_filter_missing(x) if isinstance(x, dict) else x for x", "= [\"HTTPClient\"] log = logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages the global", "if we get a global, that's pretty bad, this would", "route: The route to take json: A json payload to", "user is hitting the api from 2 clients sharing a", "delta: float) -> None: \"\"\" Lock the global lock for", "1 ) # global rate-limit is 50 per second, conservatively", "self.connector: Optional[BaseConnector] = connector self.loop = asyncio.get_event_loop() if loop is", "1 and e.errno in (54, 10054): await asyncio.sleep(1 + attempt", ">= 500: raise DiscordError(response, response_data=result, route=route) else: raise HTTPException(response, response_data=result,", "take json: A json payload to send in the request", "0: # Last call available in the bucket, lock until", "# Server issues, retry log.warning( f\"{route.endpoint} Received {response.status}... 
retrying in", "to the gateway, basically validates the token and grabs user", "in bot's data \"\"\" self.__session = ClientSession(connector=self.connector) self.token = token", "elif lock.remaining == 0: # Last call available in the", "e: if e.status == 401: raise LoginError(\"An improper token was", "= asyncio.Lock() async def rate_limit(self) -> None: async with self._lock:", "x for x in data] elif isinstance(data, dict): kwargs[\"json\"] =", "request to discord. parameters: route: The route to take json:", "self, route: Route, data: Absent[Union[dict, FormData]] = MISSING, reason: Absent[str]", "-> None: await self._lock.acquire() async def __aexit__(self, *args) -> None:", "from multidict import CIMultiDictProxy from dis_snek.api.http.http_requests import ( BotRequests, ChannelRequests,", "= ClientSession(connector=self.connector) self.token = token try: return await self.request(Route(\"GET\", \"/users/@me\"))", "lock = self.get_ratelimit(route) # this gets a BucketLock for this", "grabs user data. 
parameters: token: the token to use returns:", "logged in bot's data \"\"\" self.__session = ClientSession(connector=self.connector) self.token =", "if self.__session: await self.__session.close() async def get_gateway(self) -> str: \"\"\"Get", "per second, conservatively we use 45 self._lock: asyncio.Lock = asyncio.Lock()", "fetch the ratelimit bucket for Returns: The BucketLock object for", "asyncio.get_running_loop() loop.call_later(self.delta, self.unlock) async def defer_unlock(self) -> None: \"\"\"Unlocks the", "currently logged in bot's data \"\"\" self.__session = ClientSession(connector=self.connector) self.token", "\"\"\"Get the gateway url.\"\"\" try: data: dict = await self.request(Route(\"GET\",", "int(header.get(\"x-ratelimit-remaining\") or -1) self.delta = float(header.get(\"x-ratelimit-reset-after\", 0.0)) async def blind_defer_unlock(self)", "get an existing ratelimit for the respective buckethash # otherwise", "exceeding the global rate limit by throttling http requests if", "the api from 2 clients sharing a token log.error( f\"Bot", "__aenter__(self) -> None: await self._lock.acquire() async def __aexit__(self, *args) ->", "NotFound, LoginError from dis_snek.client.utils.input_utils import response_decode from dis_snek.client.utils.serializer import dict_filter_missing", "= logging.getLogger(logger_name) class GlobalLock: \"\"\"Manages the global ratelimit\"\"\" def __init__(self)", "import quote as _uriquote from weakref import WeakValueDictionary import aiohttp", "if self.unlock_on_exit and self._lock.locked(): self.unlock() self.unlock_on_exit = True class HTTPClient(", "self.global_lock.rate_limit() # prevent us exceeding the global rate limit by", "rate limit by throttling http requests if self.__session.closed: await self.login(self.token)", "lock this route and wait for unlock continue elif lock.remaining", "{lock.delta} seconds\" ) await lock.blind_defer_unlock() # lock this route, but", "if self.__session and not self.__session.closed: 
self.loop.run_until_complete(self.__session.close()) def get_ratelimit(self, route: Route)", "long as these are infrequent we're doing well log.warning( f\"{route.endpoint}", "is 50 per second, conservatively we use 45 self._lock: asyncio.Lock", "45 self._lock: asyncio.Lock = asyncio.Lock() async def rate_limit(self) -> None:", "lock: # if we have an active lock on this", "= 0.0 def __repr__(self) -> str: return f\"<BucketLock: {self.bucket_hash or", "used before, it will get an existing ratelimit for the", "to the websocket. parameters: url: the url to connect to", "be returned for attempt in range(self._max_attempts): async with lock: try:", "self._lock: asyncio.Lock = asyncio.Lock() async def rate_limit(self) -> None: async", "Union from urllib.parse import quote as _uriquote from weakref import", "{1 + attempt * 2} seconds\" ) await asyncio.sleep(1 +", "reason to this request, used for audit logs \"\"\" #", "the websocket. parameters: url: the url to connect to \"\"\"", "ThreadRequests, UserRequests, WebhookRequests, ScheduledEventsRequests, ): \"\"\"A http client for sending", "self.request(Route(\"GET\", \"/gateway\")) except HTTPException as exc: raise GatewayNotFound from exc", "Optional[asyncio.AbstractEventLoop] = None): self.connector: Optional[BaseConnector] = connector self.loop = asyncio.get_event_loop()", "= -1 self.delta: float = 0.0 def __repr__(self) -> str:", "ratelimit for the respective buckethash # otherwise a brand-new bucket", "for this route \"\"\" bucket_lock.ingest_ratelimit_header(header) if bucket_lock.bucket_hash: # We only", "log.debug( f\"{route.endpoint} Has exhausted its ratelimit ({lock.limit})! Locking route for", "None): self.connector: Optional[BaseConnector] = connector self.loop = asyncio.get_event_loop() if loop", "CIMultiDictProxy from dis_snek.api.http.http_requests import ( BotRequests, ChannelRequests, EmojiRequests, GuildRequests, InteractionRequests,", "ratelimit. 
Args: route: The route we're ingesting ratelimit for header:", "Received {response.status}... retrying in {1 + attempt * 2} seconds\"", "url: the url to connect to \"\"\" return await self.__session.ws_connect(", "this gets a BucketLock for this route. # If this", "limit header to configure this bucket lock. Args: header: A", "a reason to this request, used for audit logs \"\"\"", "import Route __all__ = [\"HTTPClient\"] log = logging.getLogger(logger_name) class GlobalLock:", "None: \"\"\"Unlock this bucket.\"\"\" self._lock.release() def ingest_ratelimit_header(self, header: CIMultiDictProxy) ->", "header: The rate limit header in question bucket_lock: The rate", "# global rate-limit is 50 per second, conservatively we use", "to send in the request reason: Attach a reason to", "result) log.debug( f\"{route.endpoint} Received {response.status} :: [{lock.remaining}/{lock.limit} calls remaining]\" )", "was passed\") from e raise async def close(self) -> None:", "websocket. parameters: url: the url to connect to \"\"\" return", "for x in data] elif isinstance(data, dict): kwargs[\"json\"] = dict_filter_missing(data)", "({__repo_url__} {__version__} Python/{__py_version__}) aiohttp/{aiohttp.__version__}\" ) def __del__(self): if self.__session and", "A header from a http response \"\"\" self.bucket_hash = header.get(\"x-ratelimit-bucket\")", "f\"Bot {self.token}\" if reason not in (None, MISSING): kwargs[\"headers\"][\"X-Audit-Log-Reason\"] =", "lock will be returned for attempt in range(self._max_attempts): async with", "available in the bucket, lock until reset log.debug( f\"{route.endpoint} Has", "wait for unlock continue elif lock.remaining == 0: # Last", "call available in the bucket, lock until reset log.debug( f\"{route.endpoint}", "while not self.cooldown_system.acquire_token(): await asyncio.sleep(self.cooldown_system.get_cooldown_time()) async def lock(self, delta: float)", "-> None: self.cooldown_system: CooldownSystem = CooldownSystem( 45, 1 ) #", "45, 1 ) 
# global rate-limit is 50 per second,", "route: Route) -> BucketLock: \"\"\" Get a route's rate limit" ]
[ "= environ.get('SECRET_KEY') USER = environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD') DB_NAME =", "False CSRF_ENABLED = True SECRET_KEY = environ.get('SECRET_KEY') USER = environ.get('DB_USER')", "= environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False # jwt", "SECRET_KEY = environ.get('SECRET_KEY') USER = environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD') DB_NAME", "= timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE = 18 class DevelopmentConfig(Config): \"\"\"", "DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG =", "SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG = False SQLALCHEMY_TRACK_MODIFICATIONS =", "f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False # jwt configuarations for the user", "auth api JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) # pagination", "\"development\" DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG", "Config(object): \"\"\" app configuration class \"\"\" TESTING = False CSRF_ENABLED", "app development configuration class \"\"\" ENV = \"development\" DEBUG =", "class \"\"\" TESTING = False CSRF_ENABLED = True SECRET_KEY =", "# jwt configuarations for the user auth api JWT_SECRET_KEY =", "the user auth api JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1)", "class \"\"\" ENV = \"development\" DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS =", "CSRF_ENABLED = True SECRET_KEY = environ.get('SECRET_KEY') USER = environ.get('DB_USER') PASSWORD", "load_dotenv load_dotenv() class Config(object): \"\"\" app configuration class \"\"\" TESTING", "False # jwt configuarations for the user auth api JWT_SECRET_KEY", "USER = environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME') 
HOST", "import timedelta from dotenv import load_dotenv load_dotenv() class Config(object): \"\"\"", "class DevelopmentConfig(Config): \"\"\" app development configuration class \"\"\" ENV =", "datetime import timedelta from dotenv import load_dotenv load_dotenv() class Config(object):", "ENV = \"development\" DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS = True class", "SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False # jwt configuarations for", "configuration class \"\"\" ENV = \"development\" DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS", "environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE = 18 class", "dotenv import load_dotenv load_dotenv() class Config(object): \"\"\" app configuration class", "import psycopg2 from datetime import timedelta from dotenv import load_dotenv", "for the user auth api JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES =", "development configuration class \"\"\" ENV = \"development\" DEBUG = True", "= f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False # jwt configuarations for the", "environ.get('SECRET_KEY') USER = environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME')", "environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False # jwt configuarations", "import load_dotenv load_dotenv() class Config(object): \"\"\" app configuration class \"\"\"", "load_dotenv() class Config(object): \"\"\" app configuration class \"\"\" TESTING =", "= False # jwt configuarations for the user auth api", "timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE = 18 class DevelopmentConfig(Config): \"\"\" app", "= environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME') HOST =", "\"\"\" app development configuration class 
\"\"\" ENV = \"development\" DEBUG", "# pagination NUM_OF_ITEMS_PER_PAGE = 18 class DevelopmentConfig(Config): \"\"\" app development", "= environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE = 18", "HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False #", "app configuration class \"\"\" TESTING = False CSRF_ENABLED = True", "= True SECRET_KEY = environ.get('SECRET_KEY') USER = environ.get('DB_USER') PASSWORD =", "environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME') HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\"", "pagination NUM_OF_ITEMS_PER_PAGE = 18 class DevelopmentConfig(Config): \"\"\" app development configuration", "True SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG = False SQLALCHEMY_TRACK_MODIFICATIONS", "NUM_OF_ITEMS_PER_PAGE = 18 class DevelopmentConfig(Config): \"\"\" app development configuration class", "DevelopmentConfig(Config): \"\"\" app development configuration class \"\"\" ENV = \"development\"", "import environ import psycopg2 from datetime import timedelta from dotenv", "\"\"\" app configuration class \"\"\" TESTING = False CSRF_ENABLED =", "jwt configuarations for the user auth api JWT_SECRET_KEY = environ.get('SECRET_KEY')", "= 18 class DevelopmentConfig(Config): \"\"\" app development configuration class \"\"\"", "SQLALCHEMY_TRACK_MODIFICATIONS = False # jwt configuarations for the user auth", "psycopg2 from datetime import timedelta from dotenv import load_dotenv load_dotenv()", "DB_NAME = environ.get('DB_NAME') HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS", "api JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE", "timedelta from 
dotenv import load_dotenv load_dotenv() class Config(object): \"\"\" app", "\"\"\" TESTING = False CSRF_ENABLED = True SECRET_KEY = environ.get('SECRET_KEY')", "configuration class \"\"\" TESTING = False CSRF_ENABLED = True SECRET_KEY", "= True SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config): DEBUG = False", "os import environ import psycopg2 from datetime import timedelta from", "from dotenv import load_dotenv load_dotenv() class Config(object): \"\"\" app configuration", "= environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME') HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI =", "from datetime import timedelta from dotenv import load_dotenv load_dotenv() class", "= \"development\" DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS = True class ProductionConfig(Config):", "environ.get('DB_NAME') HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS = False", "class Config(object): \"\"\" app configuration class \"\"\" TESTING = False", "TESTING = False CSRF_ENABLED = True SECRET_KEY = environ.get('SECRET_KEY') USER", "= False CSRF_ENABLED = True SECRET_KEY = environ.get('SECRET_KEY') USER =", "18 class DevelopmentConfig(Config): \"\"\" app development configuration class \"\"\" ENV", "PASSWORD = environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME') HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI", "JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE =", "JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) # pagination NUM_OF_ITEMS_PER_PAGE = 18 class DevelopmentConfig(Config):", "\"\"\" ENV = \"development\" DEBUG = True SQLALCHEMY_TRACK_MODIFICATIONS = True", "environ import psycopg2 from datetime import timedelta from dotenv import", "= True class ProductionConfig(Config): DEBUG = False SQLALCHEMY_TRACK_MODIFICATIONS = False", "configuarations for the user auth api 
JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES", "from os import environ import psycopg2 from datetime import timedelta", "user auth api JWT_SECRET_KEY = environ.get('SECRET_KEY') JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=1) #", "environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD') DB_NAME = environ.get('DB_NAME') HOST = environ.get('DB_HOST')", "True SECRET_KEY = environ.get('SECRET_KEY') USER = environ.get('DB_USER') PASSWORD = environ.get('DB_PASSWORD')", "= environ.get('DB_NAME') HOST = environ.get('DB_HOST') SQLALCHEMY_DATABASE_URI = f\"postgresql://{USER}:{PASSWORD}@{HOST}/{DB_NAME}\" SQLALCHEMY_TRACK_MODIFICATIONS =" ]
[ "the mnemonic seed must begin with this SEED_PREFIX = '01'", "return SEED_PREFIX_SW elif seed_type == '2fa': return SEED_PREFIX_2FA elif seed_type", "<filename>electrum/version.py ELECTRUM_VERSION = '4.1.5-radc' # version of the client package", "'4.1.5-radc' # version of the client package APK_VERSION = '4.1.5.0'", "wallet SEED_PREFIX_2FA = '101' # Two-factor authentication SEED_PREFIX_2FA_SW = '102'", "= '1.4' # protocol version requested # The hash of", "seed_type == 'segwit': return SEED_PREFIX_SW elif seed_type == '2fa': return", "begin with this SEED_PREFIX = '01' # Standard wallet SEED_PREFIX_SW", "Standard wallet SEED_PREFIX_SW = '100' # Segwit wallet SEED_PREFIX_2FA =", "this SEED_PREFIX = '01' # Standard wallet SEED_PREFIX_SW = '100'", "with this SEED_PREFIX = '01' # Standard wallet SEED_PREFIX_SW =", "elif seed_type == '2fa': return SEED_PREFIX_2FA elif seed_type == '2fa_segwit':", "== 'standard': return SEED_PREFIX elif seed_type == 'segwit': return SEED_PREFIX_SW", "ELECTRUM_VERSION = '4.1.5-radc' # version of the client package APK_VERSION", "'segwit': return SEED_PREFIX_SW elif seed_type == '2fa': return SEED_PREFIX_2FA elif", "'2fa': return SEED_PREFIX_2FA elif seed_type == '2fa_segwit': return SEED_PREFIX_2FA_SW raise", "= '4.1.5.0' # read by buildozer.spec PROTOCOL_VERSION = '1.4' #", "of the mnemonic seed must begin with this SEED_PREFIX =", "'100' # Segwit wallet SEED_PREFIX_2FA = '101' # Two-factor authentication", "seed must begin with this SEED_PREFIX = '01' # Standard", "hash of the mnemonic seed must begin with this SEED_PREFIX", "# Segwit wallet SEED_PREFIX_2FA = '101' # Two-factor authentication SEED_PREFIX_2FA_SW", "def seed_prefix(seed_type): if seed_type == 'standard': return SEED_PREFIX elif seed_type", "Two-factor auth, using segwit def seed_prefix(seed_type): if seed_type == 'standard':", "the client package APK_VERSION = '4.1.5.0' # read by buildozer.spec", "of the client package APK_VERSION = '4.1.5.0' # read by", "version of 
the client package APK_VERSION = '4.1.5.0' # read", "PROTOCOL_VERSION = '1.4' # protocol version requested # The hash", "seed_type == 'standard': return SEED_PREFIX elif seed_type == 'segwit': return", "= '100' # Segwit wallet SEED_PREFIX_2FA = '101' # Two-factor", "SEED_PREFIX elif seed_type == 'segwit': return SEED_PREFIX_SW elif seed_type ==", "version requested # The hash of the mnemonic seed must", "read by buildozer.spec PROTOCOL_VERSION = '1.4' # protocol version requested", "mnemonic seed must begin with this SEED_PREFIX = '01' #", "= '01' # Standard wallet SEED_PREFIX_SW = '100' # Segwit", "'1.4' # protocol version requested # The hash of the", "elif seed_type == '2fa_segwit': return SEED_PREFIX_2FA_SW raise Exception(f\"unknown seed_type: {seed_type}\")", "# Standard wallet SEED_PREFIX_SW = '100' # Segwit wallet SEED_PREFIX_2FA", "return SEED_PREFIX_2FA elif seed_type == '2fa_segwit': return SEED_PREFIX_2FA_SW raise Exception(f\"unknown", "by buildozer.spec PROTOCOL_VERSION = '1.4' # protocol version requested #", "Two-factor authentication SEED_PREFIX_2FA_SW = '102' # Two-factor auth, using segwit", "must begin with this SEED_PREFIX = '01' # Standard wallet", "APK_VERSION = '4.1.5.0' # read by buildozer.spec PROTOCOL_VERSION = '1.4'", "= '101' # Two-factor authentication SEED_PREFIX_2FA_SW = '102' # Two-factor", "auth, using segwit def seed_prefix(seed_type): if seed_type == 'standard': return", "SEED_PREFIX_2FA = '101' # Two-factor authentication SEED_PREFIX_2FA_SW = '102' #", "authentication SEED_PREFIX_2FA_SW = '102' # Two-factor auth, using segwit def", "wallet SEED_PREFIX_SW = '100' # Segwit wallet SEED_PREFIX_2FA = '101'", "client package APK_VERSION = '4.1.5.0' # read by buildozer.spec PROTOCOL_VERSION", "SEED_PREFIX = '01' # Standard wallet SEED_PREFIX_SW = '100' #", "using segwit def seed_prefix(seed_type): if seed_type == 'standard': return SEED_PREFIX", "if seed_type == 'standard': return SEED_PREFIX elif seed_type == 'segwit':", 
"SEED_PREFIX_2FA elif seed_type == '2fa_segwit': return SEED_PREFIX_2FA_SW raise Exception(f\"unknown seed_type:", "'01' # Standard wallet SEED_PREFIX_SW = '100' # Segwit wallet", "'102' # Two-factor auth, using segwit def seed_prefix(seed_type): if seed_type", "The hash of the mnemonic seed must begin with this", "= '4.1.5-radc' # version of the client package APK_VERSION =", "elif seed_type == 'segwit': return SEED_PREFIX_SW elif seed_type == '2fa':", "protocol version requested # The hash of the mnemonic seed", "# The hash of the mnemonic seed must begin with", "SEED_PREFIX_SW elif seed_type == '2fa': return SEED_PREFIX_2FA elif seed_type ==", "SEED_PREFIX_SW = '100' # Segwit wallet SEED_PREFIX_2FA = '101' #", "Segwit wallet SEED_PREFIX_2FA = '101' # Two-factor authentication SEED_PREFIX_2FA_SW =", "# protocol version requested # The hash of the mnemonic", "# version of the client package APK_VERSION = '4.1.5.0' #", "segwit def seed_prefix(seed_type): if seed_type == 'standard': return SEED_PREFIX elif", "buildozer.spec PROTOCOL_VERSION = '1.4' # protocol version requested # The", "# Two-factor authentication SEED_PREFIX_2FA_SW = '102' # Two-factor auth, using", "seed_type == '2fa': return SEED_PREFIX_2FA elif seed_type == '2fa_segwit': return", "seed_prefix(seed_type): if seed_type == 'standard': return SEED_PREFIX elif seed_type ==", "'101' # Two-factor authentication SEED_PREFIX_2FA_SW = '102' # Two-factor auth,", "'4.1.5.0' # read by buildozer.spec PROTOCOL_VERSION = '1.4' # protocol", "# Two-factor auth, using segwit def seed_prefix(seed_type): if seed_type ==", "requested # The hash of the mnemonic seed must begin", "return SEED_PREFIX elif seed_type == 'segwit': return SEED_PREFIX_SW elif seed_type", "== '2fa': return SEED_PREFIX_2FA elif seed_type == '2fa_segwit': return SEED_PREFIX_2FA_SW", "# read by buildozer.spec PROTOCOL_VERSION = '1.4' # protocol version", "package APK_VERSION = '4.1.5.0' # read by buildozer.spec PROTOCOL_VERSION =", "= '102' # 
Two-factor auth, using segwit def seed_prefix(seed_type): if", "'standard': return SEED_PREFIX elif seed_type == 'segwit': return SEED_PREFIX_SW elif", "== 'segwit': return SEED_PREFIX_SW elif seed_type == '2fa': return SEED_PREFIX_2FA", "SEED_PREFIX_2FA_SW = '102' # Two-factor auth, using segwit def seed_prefix(seed_type):" ]
[ "torch import numpy as np import cv2 from lib.utils.visualize_utils import", "= p.forward(feat_dim) print(p1) def test_filp(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps']", "0') def _setup(self, cfg): num_feat = len(self._steps) for item in", "map size\"\"\" assert self._prior_cfg is not {} return [int(len(self._create_prior(0, 0,", "p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] = False", "anchor box on a feature map for prior_idx in range(prior_num):", "cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS self._cfg_list = [] self._prior_cfg = {}", "1 / 3], [2, 1 / 2, 3, 1 /", "max size per layer s_i_prime = sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] /", "image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2) def forward(self, layer_dims,", "has writer as input\"\"\" def wrapper(*args, **kw): return func(*args, **kw)", "k) priors += prior self._prior_vis(prior, image, k, tb_writer=tb_writer) output =", "s_i = ms / self.image_size[0] s_j = ms / self.image_size[1]", "cfg['feature_maps'])] cfg['image_size'] = [600, 300] p = PriorBoxSSD(cfg) p1 =", "image = image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num, :] box_centers_ = box_centers[4", "the original paper do prior = [] min_sizes = self._prior_cfg['MIN_SIZES'][k]", "TBWriter def vis(func): \"\"\"tensorboard visualization if has writer as input\"\"\"", "2) def forward(self, layer_dims, tb_writer=None, image=None): priors = [] image", "image, k, tb_writer=tb_writer) output = torch.Tensor(priors).view(-1, 4) # TODO this", "= False cfg['aspect_ratios'] = [[2, 1 / 2], [2, 1", "cv2.imread(image, -1) image = cv2.resize(image, (self.image_size[1], self.image_size[0])) return image @vis", "= [300, 600] feat_dim = [list(a) for a in zip([item", ":2] * scale[:2] # [x, y] # bboxs: [xmin, ymin,", "cfg): super(PriorBoxSSD, self).__init__(cfg) # self.image_size = cfg['image_size'] self._cfg_list = ['MIN_SIZES',", "= ms / 
self.image_size[0] s_j = ms / self.image_size[1] prior", "import TBWriter # tb_writer = TBWriter(log_dir, {'epoch': 50}) # #", "range(len(layer_dims)): prior = [] for i, j in product(range(layer_dims[k][0]), range(layer_dims[k][1])):", "image=None): priors = [] image = self._image_proc(image=image, tb_writer=tb_writer) for k", "p1 = p.forward(feat_dim) print(p1) def test_filp(cfg, tb_writer): cfg = copy.deepcopy(cfg)", "size per layer s_i_prime = sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0]))", "product import torch import numpy as np import cv2 from", "self._steps = cfg.MODEL.STEPS self._cfg_list = [] self._prior_cfg = {} self._clip", "and len(cfg.MODEL[item]) != 0: raise Exception(\"config {} length does not", "visualize each anchor box on a feature map for prior_idx", "tb_writer=tb_writer) output = torch.Tensor(priors).view(-1, 4) # TODO this clip is", "PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] = False cfg['aspect_ratios'] =", "tb_writer) # test_rectangle(cfg, tb_writer) print('haha') from lib.utils.config import cfg print(cfg)", "_setup(self, cfg): num_feat = len(self._steps) for item in self._cfg_list: if", "4)) box_centers = bboxs[:, :2] * scale[:2] # [x, y]", "(i + 0.5) / steps_y prior += self._create_prior(cx, cy, k)", "_prior_vis(self, anchor, image_ori, feat_idx, tb_writer=None): # TODO add output path", "square s_i = ms / self.image_size[0] s_j = ms /", "type(self._prior_cfg['MAX_SIZES'][k]) is not list # one max size per layer", "sqrt(ar), s_i * sqrt(ar)] return prior # PriorBox = PriorBoxSSD", "{} self._clip = cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE for v in", "before knowing feature map size\"\"\" assert self._prior_cfg is not {}", "test_filp(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10,", "if self._flip: prior += [cx, cy, s_j / sqrt(ar), s_i", "264] cfg['flip'] = True # feat_dim = [list(a) for a", "2 for item in cfg['feature_maps']])] # cfg['image_size'] = 
[300, 600]", "PriorBoxSSD def test_no_vis(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38,", "lib.utils.visualize_utils import TBWriter # tb_writer = TBWriter(log_dir, {'epoch': 50}) #", "(0, 255, 0), 1) image = image[..., ::-1] image =", "def forward(self, layer_dims, tb_writer=None, image=None): priors = [] image =", "if kw['tb_writer'] is not None else None return wrapper class", "/ 2)) * scale box_centers = box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32)", "cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self, cx, cy, k): # as the", "type(None)): image = np.ones((self.image_size[1], self.image_size[0], 3)) elif isinstance(image, str): image", "self._prior_vis(prior, image, k, tb_writer=tb_writer) output = torch.Tensor(priors).view(-1, 4) # TODO", "for item in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] = [600, 300] p", "a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) print(p.num_priors) p1 =", "< 1e-8 def test_rectangle(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] =", "111, 162, 213, 264] cfg['flip'] = True # feat_dim =", "self._image_proc(image=image, tb_writer=tb_writer) for k in range(len(layer_dims)): prior = [] for", "tb_writer=tb_writer) print(p1.shape) if __name__ == '__main__': import copy # from", "def vis(func): \"\"\"tensorboard visualization if has writer as input\"\"\" def", "range(prior_num): image = image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num, :] box_centers_ =", "[cx, cy, s_j * sqrt(ar), s_i / sqrt(ar)] # a", "print(p1.shape) if __name__ == '__main__': import copy # from lib.datasets.config", "# [x, y] # bboxs: [xmin, ymin, xmax, ymax] bboxs", "PriorBoxSSD(PriorBoxBase): def __init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg) # self.image_size = cfg['image_size']", "== '__main__': import copy # from lib.datasets.config import ssd_voc_vgg as", "def test_no_vis(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19,", 
"p1).sum() < 1e-8 def test_rectangle(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps']", "import sqrt as sqrt from itertools import product as product", "TBWriter(log_dir, {'epoch': 50}) # # test_no_vis(cfg, tb_writer) # test_filp(cfg, tb_writer)", "return func(*args, **kw) if kw['tb_writer'] is not None else None", "class PriorBoxBase(object): \"\"\"Compute priorbox coordinates in center-offset form for each", "in center-offset form for each source feature map. \"\"\" def", "[x, y] # bboxs: [xmin, ymin, xmax, ymax] bboxs =", "s_i_prime] # rectangles by min and aspect ratio for ar", "p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if __name__ == '__main__': import copy #", "by min and aspect ratio for ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior", "box_centers[4 * prior_idx::prior_num, :] for archor, bbox in zip(box_centers_, bboxs_):", "as product import torch import numpy as np import cv2", "image=None, tb_writer=None): # TODO test with image if isinstance(image, type(None)):", "def _create_prior(self, cx, cy, k): # as the original paper", "from lib.utils.visualize_utils import TBWriter def vis(func): \"\"\"tensorboard visualization if has", "0, k)) / 4) for k in range(len(self._steps))] def _create_prior(self,", "show diagnal anchors cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (0, 255,", "self.image_size[0]] bboxs = np.array(anchor).reshape((-1, 4)) box_centers = bboxs[:, :2] *", "/ 2]] p = PriorBox(cfg) p2 = p.forward(feat_dim, tb_writer=tb_writer) #", "s_j_prime = sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior += [cx,", "= [38, 19, 10, 5, 3, 1] cfg['min_sizes'] = [30,", "cx, cy, k): # as the original paper do prior", "ssd_voc_vgg as cfg # from lib.utils.visualize_utils import TBWriter # tb_writer", "p.forward(feat_dim) print(p1) def test_filp(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] =", "from itertools import product as product import torch import numpy", "archor[1]), 1, 
(0, 0, 255), -1) if archor[0] == archor[1]:", "/ self.image_size[0])) s_j_prime = sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior", "= np.hstack((bboxs[:, :2] - bboxs[:, 2:4] / 2, bboxs[:, :2]", "for a in zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size'] = [300, 300]", "ValueError('Variances must be greater than 0') def _setup(self, cfg): num_feat", "[[30], [60], 111, 162, 213, 264] cfg['flip'] = True feat_dim", "self.image_size[0], 3)) elif isinstance(image, str): image = cv2.imread(image, -1) image", "forward(self, layer_dims, tb_writer=None, image=None): priors = [] image = self._image_proc(image=image,", "for a in zip(cfg['feature_maps'], [item * 2 for item in", "for ms in min_sizes: # min square s_i = ms", "= {} self._clip = cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE for v", "v in self._variance: if v <= 0: raise ValueError('Variances must", "self._cfg_list = [] self._prior_cfg = {} self._clip = cfg.MODEL.CLIP self._variance", "cx, cy, k): raise NotImplementedError @vis def _image_proc(self, image=None, tb_writer=None):", "v <= 0: raise ValueError('Variances must be greater than 0')", "image_ori, feat_idx, tb_writer=None): # TODO add output path to the", "= TBWriter(log_dir, {'epoch': 50}) # # test_no_vis(cfg, tb_writer) # test_filp(cfg,", "@vis def _image_proc(self, image=None, tb_writer=None): # TODO test with image", "np.array(anchor).reshape((-1, 4)) box_centers = bboxs[:, :2] * scale[:2] # [x,", "than 0') def _setup(self, cfg): num_feat = len(self._steps) for item", "!= 0: raise Exception(\"config {} length does not match step", "config!\") if len(cfg.MODEL[item]) != num_feat and len(cfg.MODEL[item]) != 0: raise", "s_i / sqrt(ar)] # a vertical box if self._flip: prior", "feat_dim = [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size']", "(bbox[2], bbox[3]), (0, 255, 0), 1) image = image[..., ::-1]", "assert self._prior_cfg is not {} return [int(len(self._create_prior(0, 0, 
k)) /", "= np.array(anchor).reshape((-1, 4)) box_centers = bboxs[:, :2] * scale[:2] #", "cy, k) priors += prior self._prior_vis(prior, image, k, tb_writer=tb_writer) output", "self).__init__(cfg) # self.image_size = cfg['image_size'] self._cfg_list = ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS']", "test with image if isinstance(image, type(None)): image = np.ones((self.image_size[1], self.image_size[0],", "= bboxs.astype(np.int32) # visualize each anchor box on a feature", "signature writer = tb_writer.writer prior_num = self.num_priors[feat_idx] # transform coordinates", "bboxs_): cv2.circle(image, (archor[0], archor[1]), 1, (0, 0, 255), -1) if", "zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size'] = [300, 300] # feat_dim =", "1, (0, 0, 255), -1) if archor[0] == archor[1]: #", "copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10, 5, 3, 1] cfg['min_sizes']", "10, 5, 3, 1] cfg['min_sizes'] = [[30], [60], 111, 162,", "box if self._flip: prior += [cx, cy, s_j / sqrt(ar),", "size\"\"\" assert self._prior_cfg is not {} return [int(len(self._create_prior(0, 0, k))", "[cx, cy, s_j / sqrt(ar), s_i * sqrt(ar)] return prior", "self.image_size[0], self.image_size[1], self.image_size[0]] bboxs = np.array(anchor).reshape((-1, 4)) box_centers = bboxs[:,", "copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10, 5, 3, 1] cfg['flip']", "__future__ import division from math import sqrt as sqrt from", "str): image = cv2.imread(image, -1) image = cv2.resize(image, (self.image_size[1], self.image_size[0]))", "in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) print(p.num_priors) p1 = p.forward(feat_dim)", "self._variance: if v <= 0: raise ValueError('Variances must be greater", "if len(cfg.MODEL[item]) != num_feat and len(cfg.MODEL[item]) != 0: raise Exception(\"config", "self.image_size[0] / self._steps[k] cx = (j + 0.5) / steps_x", "None return wrapper class PriorBoxBase(object): \"\"\"Compute priorbox coordinates in center-offset", "[300, 300] # 
feat_dim = [list(a) for a in zip(cfg['feature_maps'],", "vis(func): \"\"\"tensorboard visualization if has writer as input\"\"\" def wrapper(*args,", "center x,y cy = (i + 0.5) / steps_y prior", "test_rectangle(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10,", "cfg.MODEL[item] @property def num_priors(self): \"\"\"allow prior num calculation before knowing", "num calculation before knowing feature map size\"\"\" assert self._prior_cfg is", "should clip on [xmin, ymin, xmax, ymax] if self._clip: output.clamp_(max=1,", "255), -1) if archor[0] == archor[1]: # only show diagnal", "162, 213, 264] cfg['flip'] = True # feat_dim = [list(a)", "length does not match step length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item] @property", "division from math import sqrt as sqrt from itertools import", "scale[:2] # [x, y] # bboxs: [xmin, ymin, xmax, ymax]", "= cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS self._cfg_list = [] self._prior_cfg =", "cfg.MODEL.STEPS self._cfg_list = [] self._prior_cfg = {} self._clip = cfg.MODEL.CLIP", "= copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10, 5, 3, 1]", "= [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg)", "== archor[1]: # only show diagnal anchors cv2.rectangle(image, (bbox[0], bbox[1]),", "tb_writer) # test_filp(cfg, tb_writer) # test_rectangle(cfg, tb_writer) print('haha') from lib.utils.config", "/ self.image_size[0] s_j = ms / self.image_size[1] prior += [cx,", "for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) p1 =", "* 2 for item in cfg['feature_maps']])] # cfg['image_size'] = [300,", "path to the signature writer = tb_writer.writer prior_num = self.num_priors[feat_idx]", "steps_y prior += self._create_prior(cx, cy, k) priors += prior self._prior_vis(prior,", "* prior_idx::prior_num, :] for archor, bbox in zip(box_centers_, bboxs_): cv2.circle(image,", "for k in range(len(self._steps))] def _create_prior(self, cx, cy, k): 
raise", "visualization if has writer as input\"\"\" def wrapper(*args, **kw): return", "self._clip = cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE for v in self._variance:", "* 2 for item in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] = [600,", "product as product import torch import numpy as np import", "not None else None return wrapper class PriorBoxBase(object): \"\"\"Compute priorbox", "self._steps[k] cx = (j + 0.5) / steps_x # unit", "box_centers = bboxs[:, :2] * scale[:2] # [x, y] #", "print(p1) def test_filp(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38,", "**kw) if kw['tb_writer'] is not None else None return wrapper", "zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) cfg['flip']", "[list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) print(p.num_priors)", "for archor, bbox in zip(box_centers_, bboxs_): cv2.circle(image, (archor[0], archor[1]), 1,", "self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS self._cfg_list = []", "= p.forward(feat_dim, tb_writer=tb_writer) # print(p2) assert (p2 - p1).sum() <", "image @vis def _prior_vis(self, anchor, image_ori, feat_idx, tb_writer=None): # TODO", "cfg['image_size'] = [300, 600] feat_dim = [list(a) for a in", "feat_dim = [list(a) for a in zip(cfg['feature_maps'], [item * 2", "p = PriorBox(cfg) p2 = p.forward(feat_dim, tb_writer=tb_writer) # print(p2) assert", "self._cfg_list: if item not in cfg.MODEL: raise Exception(\"wrong anchor config!\")", "isinstance(min_sizes, list) else min_sizes for ms in min_sizes: # min", "5, 3, 1] cfg['min_sizes'] = [30, 60, 111, 162, 213,", "xmax, ymax] bboxs = np.hstack((bboxs[:, :2] - bboxs[:, 2:4] /", "image = np.ones((self.image_size[1], self.image_size[0], 3)) elif isinstance(image, str): image =", "in range(len(self._steps))] def _create_prior(self, cx, cy, k): raise NotImplementedError @vis", "bbox[3]), 
(0, 255, 0), 1) image = image[..., ::-1] image", "[38, 19, 10, 5, 3, 1] cfg['flip'] = True feat_dim", "4) for k in range(len(self._steps))] def _create_prior(self, cx, cy, k):", "prior += [cx, cy, s_j / sqrt(ar), s_i * sqrt(ar)]", "s_j_prime, s_i_prime] # rectangles by min and aspect ratio for", "in zip(box_centers_, bboxs_): cv2.circle(image, (archor[0], archor[1]), 1, (0, 0, 255),", "ymax] bboxs = np.hstack((bboxs[:, :2] - bboxs[:, 2:4] / 2,", "_create_prior(self, cx, cy, k): # as the original paper do", "cv2 from lib.utils.visualize_utils import TBWriter def vis(func): \"\"\"tensorboard visualization if", "as cfg # from lib.utils.visualize_utils import TBWriter # tb_writer =", "TBWriter # tb_writer = TBWriter(log_dir, {'epoch': 50}) # # test_no_vis(cfg,", "kw['tb_writer'] is not None else None return wrapper class PriorBoxBase(object):", "# tb_writer = TBWriter(log_dir, {'epoch': 50}) # # test_no_vis(cfg, tb_writer)", "sqrt from itertools import product as product import torch import", "[item * 2 for item in cfg['feature_maps']])] # cfg['image_size'] =", "p1 = p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if __name__ == '__main__': import", "length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item] @property def num_priors(self): \"\"\"allow prior num", "= sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior += [cx, cy,", "255, 0), 1) image = image[..., ::-1] image = image.transpose((2,0,1))", "cy = (i + 0.5) / steps_y prior += self._create_prior(cx,", "False cfg['aspect_ratios'] = [[2, 1 / 2], [2, 1 /", "in zip([item * 2 for item in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size']", "prior += [cx, cy, s_j_prime, s_i_prime] # rectangles by min", "[list(a) for a in zip([item * 2 for item in", "def wrapper(*args, **kw): return func(*args, **kw) if kw['tb_writer'] is not", "__init__(self, cfg): super(PriorBoxBase, self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS", "- bboxs[:, 
2:4] / 2, bboxs[:, :2] + bboxs[:, 2:4]", "in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] = [600, 300] p = PriorBoxSSD(cfg)", "cfg): super(PriorBoxBase, self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS self._cfg_list", "Exception(\"wrong anchor config!\") if len(cfg.MODEL[item]) != num_feat and len(cfg.MODEL[item]) !=", "do prior = [] min_sizes = self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes]", "import cv2 from lib.utils.visualize_utils import TBWriter def vis(func): \"\"\"tensorboard visualization", "ratio for ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx, cy, s_j", "p = PriorBoxSSD(cfg) print(p.num_priors) p1 = p.forward(feat_dim) print(p1) def test_filp(cfg,", "1) image = image[..., ::-1] image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx),", "num_feat and len(cfg.MODEL[item]) != 0: raise Exception(\"config {} length does", "in zip(cfg['feature_maps'], [item * 2 for item in cfg['feature_maps']])] #", "[2, 1 / 2, 3, 1 / 3], [2, 1", "= cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self, cx, cy, k): # as", "map for prior_idx in range(prior_num): image = image_ori.copy() bboxs_ =", "cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10, 5, 3,", "= [600, 300] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer)", "1 / 2], [2, 1 / 2, 3, 1 /", "rectangles by min and aspect ratio for ar in self._prior_cfg['ASPECT_RATIOS'][k]:", "def test_rectangle(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19,", "image if isinstance(image, type(None)): image = np.ones((self.image_size[1], self.image_size[0], 3)) elif", "output class PriorBoxSSD(PriorBoxBase): def __init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg) # self.image_size", "output = torch.Tensor(priors).view(-1, 4) # TODO this clip is meanless,", "# as the original paper do prior = [] min_sizes", "!= 0: assert type(self._prior_cfg['MAX_SIZES'][k]) 
is not list # one max", "TODO add output path to the signature writer = tb_writer.writer", "does not match step length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item] @property def", "3, 1 / 3], [2, 1 / 2], [2, 1", "a in zip([item * 2 for item in cfg['feature_maps']], cfg['feature_maps'])]", "raise NotImplementedError @vis def _image_proc(self, image=None, tb_writer=None): # TODO test", "[] for i, j in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x = self.image_size[1]", "len(cfg.MODEL[item]) != 0: raise Exception(\"config {} length does not match", "box_centers = box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32) # visualize each anchor", "self.image_size[1], self.image_size[0]] bboxs = np.array(anchor).reshape((-1, 4)) box_centers = bboxs[:, :2]", "math import sqrt as sqrt from itertools import product as", "isinstance(image, type(None)): image = np.ones((self.image_size[1], self.image_size[0], 3)) elif isinstance(image, str):", "\"\"\" def __init__(self, cfg): super(PriorBoxBase, self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE self._steps", "step length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item] @property def num_priors(self): \"\"\"allow prior", "for a in zip([item * 2 for item in cfg['feature_maps']],", "a vertical box if self._flip: prior += [cx, cy, s_j", "min_sizes for ms in min_sizes: # min square s_i =", "isinstance(image, str): image = cv2.imread(image, -1) image = cv2.resize(image, (self.image_size[1],", "sqrt(ar), s_i / sqrt(ar)] # a vertical box if self._flip:", "aspect ratio for ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx, cy,", "self._flip: prior += [cx, cy, s_j / sqrt(ar), s_i *", "= PriorBoxSSD def test_no_vis(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] =", "class PriorBoxSSD(PriorBoxBase): def __init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg) # self.image_size =", "self.image_size = cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS self._cfg_list 
= [] self._prior_cfg", "= [38, 19, 10, 5, 3, 1] cfg['flip'] = True", "4) # TODO this clip is meanless, should clip on", "'MAX_SIZES', 'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self, cx, cy,", "* (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime = sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] /", "bboxs[:, :2] + bboxs[:, 2:4] / 2)) * scale box_centers", "1 / 2]] p = PriorBox(cfg) p2 = p.forward(feat_dim, tb_writer=tb_writer)", "prior # PriorBox = PriorBoxSSD def test_no_vis(cfg, tb_writer): cfg =", "in cfg.MODEL: raise Exception(\"wrong anchor config!\") if len(cfg.MODEL[item]) != num_feat", "[list(a) for a in zip(cfg['feature_maps'], [item * 2 for item", "else min_sizes for ms in min_sizes: # min square s_i", "k): # as the original paper do prior = []", "111, 162, 213, 264] cfg['flip'] = True feat_dim = [list(a)", "p1 = p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] = False cfg['aspect_ratios'] = [[2,", ":2] + bboxs[:, 2:4] / 2)) * scale box_centers =", "/ self.image_size[1])) prior += [cx, cy, s_j_prime, s_i_prime] # rectangles", "def _setup(self, cfg): num_feat = len(self._steps) for item in self._cfg_list:", "anchors cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (0, 255, 0), 1)", "prior self._prior_vis(prior, image, k, tb_writer=tb_writer) output = torch.Tensor(priors).view(-1, 4) #", "= self.image_size[0] / self._steps[k] cx = (j + 0.5) /", "s_j = ms / self.image_size[1] prior += [cx, cy, s_j,", "# test_filp(cfg, tb_writer) # test_rectangle(cfg, tb_writer) print('haha') from lib.utils.config import", "tb_writer=tb_writer) cfg['flip'] = False cfg['aspect_ratios'] = [[2, 1 / 2],", "from math import sqrt as sqrt from itertools import product", "[2, 1 / 2]] p = PriorBox(cfg) p2 = p.forward(feat_dim,", "**kw): return func(*args, **kw) if kw['tb_writer'] is not None else", "None else None return wrapper class PriorBoxBase(object): \"\"\"Compute priorbox coordinates", "/ self._steps[k] steps_y = 
self.image_size[0] / self._steps[k] cx = (j", "for i, j in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x = self.image_size[1] /", "if has writer as input\"\"\" def wrapper(*args, **kw): return func(*args,", "cfg): num_feat = len(self._steps) for item in self._cfg_list: if item", "priors = [] image = self._image_proc(image=image, tb_writer=tb_writer) for k in", "2:4] / 2, bboxs[:, :2] + bboxs[:, 2:4] / 2))", "test_no_vis(cfg, tb_writer) # test_filp(cfg, tb_writer) # test_rectangle(cfg, tb_writer) print('haha') from", "form for each source feature map. \"\"\" def __init__(self, cfg):", "zip(cfg['feature_maps'], [item * 2 for item in cfg['feature_maps']])] # cfg['image_size']", "min_sizes = self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes] if not isinstance(min_sizes, list)", "self.image_size[0] s_j = ms / self.image_size[1] prior += [cx, cy,", "2]] p = PriorBox(cfg) p2 = p.forward(feat_dim, tb_writer=tb_writer) # print(p2)", "= sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime = sqrt(s_j *", "a in zip(cfg['feature_maps'], [item * 2 for item in cfg['feature_maps']])]", "self.image_size[0])) s_j_prime = sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior +=", "be greater than 0') def _setup(self, cfg): num_feat = len(self._steps)", "this clip is meanless, should clip on [xmin, ymin, xmax,", "np.ones((self.image_size[1], self.image_size[0], 3)) elif isinstance(image, str): image = cv2.imread(image, -1)", "prior_idx), image, 2) def forward(self, layer_dims, tb_writer=None, image=None): priors =", "[list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size'] = [300,", "item in self._cfg_list: if item not in cfg.MODEL: raise Exception(\"wrong", "cfg['aspect_ratios'] = [[2, 1 / 2], [2, 1 / 2,", "item not in cfg.MODEL: raise Exception(\"wrong anchor config!\") if len(cfg.MODEL[item])", "= len(self._steps) for item in self._cfg_list: if item not in", "lib.datasets.config import 
ssd_voc_vgg as cfg # from lib.utils.visualize_utils import TBWriter", "cv2.circle(image, (archor[0], archor[1]), 1, (0, 0, 255), -1) if archor[0]", "cfg['feature_maps'] = [38, 19, 10, 5, 3, 1] cfg['min_sizes'] =", "= PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if __name__ ==", "tb_writer=None): # TODO test with image if isinstance(image, type(None)): image", "output.clamp_(max=1, min=0) return output class PriorBoxSSD(PriorBoxBase): def __init__(self, cfg): super(PriorBoxSSD,", "import numpy as np import cv2 from lib.utils.visualize_utils import TBWriter", "with image if isinstance(image, type(None)): image = np.ones((self.image_size[1], self.image_size[0], 3))", "cfg['feature_maps'] = [38, 19, 10, 5, 3, 1] cfg['flip'] =", "a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim,", "cfg.MODEL.VARIANCE for v in self._variance: if v <= 0: raise", "* sqrt(ar), s_i / sqrt(ar)] # a vertical box if", "for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) print(p.num_priors) p1", "one max size per layer s_i_prime = sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k]", "feature map for prior_idx in range(prior_num): image = image_ori.copy() bboxs_", "sqrt(ar)] return prior # PriorBox = PriorBoxSSD def test_no_vis(cfg, tb_writer):", "/ 2, 3, 1 / 3], [2, 1 / 2,", "213, 264] cfg['flip'] = True feat_dim = [list(a) for a", "bboxs.astype(np.int32) # visualize each anchor box on a feature map", "# transform coordinates scale = [self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]] bboxs", "is not list # one max size per layer s_i_prime", "* (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior += [cx, cy, s_j_prime, s_i_prime]", "(j + 0.5) / steps_x # unit center x,y cy", "/ 2], [2, 1 / 2, 3, 1 / 3],", "[xmin, ymin, xmax, ymax] bboxs = np.hstack((bboxs[:, :2] - bboxs[:,", "= bboxs[:, :2] * scale[:2] # [x, y] # bboxs:", "= cfg.MODEL.VARIANCE for v in 
self._variance: if v <= 0:", "[[2, 1 / 2], [2, 1 / 2, 3, 1", "2], [2, 1 / 2, 3, 1 / 3], [2,", "return [int(len(self._create_prior(0, 0, k)) / 4) for k in range(len(self._steps))]", "for v in self._variance: if v <= 0: raise ValueError('Variances", "min=0) return output class PriorBoxSSD(PriorBoxBase): def __init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg)", "only show diagnal anchors cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (0,", "each source feature map. \"\"\" def __init__(self, cfg): super(PriorBoxBase, self).__init__()", "::-1] image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2) def forward(self,", "3], [2, 1 / 2], [2, 1 / 2]] p", "sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior += [cx, cy, s_j_prime,", "assert (p2 - p1).sum() < 1e-8 def test_rectangle(cfg, tb_writer): cfg", "range(len(self._steps))] def _create_prior(self, cx, cy, k): raise NotImplementedError @vis def", "image, 2) def forward(self, layer_dims, tb_writer=None, image=None): priors = []", "0: raise Exception(\"config {} length does not match step length!\".format(item))", "= [[30], [60], 111, 162, 213, 264] cfg['flip'] = True", "1e-8 def test_rectangle(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38,", "diagnal anchors cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (0, 255, 0),", "feat_dim = [list(a) for a in zip([item * 2 for", "num_feat = len(self._steps) for item in self._cfg_list: if item not", "self._setup(cfg) def _create_prior(self, cx, cy, k): # as the original", "def _prior_vis(self, anchor, image_ori, feat_idx, tb_writer=None): # TODO add output", "min square s_i = ms / self.image_size[0] s_j = ms", "self._cfg_list = ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP self._setup(cfg) def", "/ 2, bboxs[:, :2] + bboxs[:, 2:4] / 2)) *", "2, 3, 1 / 3], [2, 1 / 2], [2,", "self.num_priors[feat_idx] # transform coordinates 
scale = [self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]]", "# from lib.datasets.config import ssd_voc_vgg as cfg # from lib.utils.visualize_utils", "# rectangles by min and aspect ratio for ar in", "bboxs[:, 2:4] / 2, bboxs[:, :2] + bboxs[:, 2:4] /", "for item in cfg['feature_maps']])] # cfg['image_size'] = [300, 600] feat_dim", "= self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes] if not isinstance(min_sizes, list) else", "cv2.resize(image, (self.image_size[1], self.image_size[0])) return image @vis def _prior_vis(self, anchor, image_ori,", "prior_idx::prior_num, :] for archor, bbox in zip(box_centers_, bboxs_): cv2.circle(image, (archor[0],", "2, 3, 1 / 3], [2, 1 / 2, 3,", "if isinstance(image, type(None)): image = np.ones((self.image_size[1], self.image_size[0], 3)) elif isinstance(image,", "self.image_size = cfg['image_size'] self._cfg_list = ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip =", "[cx, cy, s_j, s_i] # min max square if len(self._prior_cfg['MAX_SIZES'])", "if not isinstance(min_sizes, list) else min_sizes for ms in min_sizes:", "p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] = False cfg['aspect_ratios'] = [[2, 1 /", "num_priors(self): \"\"\"allow prior num calculation before knowing feature map size\"\"\"", "not {} return [int(len(self._create_prior(0, 0, k)) / 4) for k", "per layer s_i_prime = sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime", "as input\"\"\" def wrapper(*args, **kw): return func(*args, **kw) if kw['tb_writer']", "def test_filp(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19,", "= cfg.MODEL.STEPS self._cfg_list = [] self._prior_cfg = {} self._clip =", "TODO this clip is meanless, should clip on [xmin, ymin,", "['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self, cx,", "= np.ones((self.image_size[1], self.image_size[0], 3)) elif isinstance(image, str): image = 
cv2.imread(image,", "+= [cx, cy, s_j * sqrt(ar), s_i / sqrt(ar)] #", "[60], 111, 162, 213, 264] cfg['flip'] = True feat_dim =", "image = self._image_proc(image=image, tb_writer=tb_writer) for k in range(len(layer_dims)): prior =", "* scale[:2] # [x, y] # bboxs: [xmin, ymin, xmax,", "the signature writer = tb_writer.writer prior_num = self.num_priors[feat_idx] # transform", "600] feat_dim = [list(a) for a in zip([item * 2", "0: raise ValueError('Variances must be greater than 0') def _setup(self,", "not list # one max size per layer s_i_prime =", "if v <= 0: raise ValueError('Variances must be greater than", "= ms / self.image_size[1] prior += [cx, cy, s_j, s_i]", ":] for archor, bbox in zip(box_centers_, bboxs_): cv2.circle(image, (archor[0], archor[1]),", "\"\"\"tensorboard visualization if has writer as input\"\"\" def wrapper(*args, **kw):", "match step length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item] @property def num_priors(self): \"\"\"allow", "np.hstack((bboxs[:, :2] - bboxs[:, 2:4] / 2, bboxs[:, :2] +", "image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num, :] box_centers_ = box_centers[4 * prior_idx::prior_num,", "len(self._steps) for item in self._cfg_list: if item not in cfg.MODEL:", "* scale box_centers = box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32) # visualize", "= ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self,", "output path to the signature writer = tb_writer.writer prior_num =", "on a feature map for prior_idx in range(prior_num): image =", "original paper do prior = [] min_sizes = self._prior_cfg['MIN_SIZES'][k] min_sizes", "cfg['image_size'] = [300, 300] # feat_dim = [list(a) for a", "self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx, cy, s_j * sqrt(ar), s_i /", "cy, s_j * sqrt(ar), s_i / sqrt(ar)] # a vertical", "+= self._create_prior(cx, cy, k) priors += prior self._prior_vis(prior, image, k,", "bboxs = bboxs.astype(np.int32) # visualize each anchor 
box on a", "in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer)", "3, 1 / 3], [2, 1 / 2, 3, 1", "= box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32) # visualize each anchor box", "i, j in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x = self.image_size[1] / self._steps[k]", "{} length does not match step length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item]", "ymin, xmax, ymax] if self._clip: output.clamp_(max=1, min=0) return output class", "return output class PriorBoxSSD(PriorBoxBase): def __init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg) #", "# cfg['image_size'] = [300, 300] # feat_dim = [list(a) for", "p2 = p.forward(feat_dim, tb_writer=tb_writer) # print(p2) assert (p2 - p1).sum()", "image = cv2.resize(image, (self.image_size[1], self.image_size[0])) return image @vis def _prior_vis(self,", "meanless, should clip on [xmin, ymin, xmax, ymax] if self._clip:", "(self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime = sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[1]))", "as the original paper do prior = [] min_sizes =", "= image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num, :] box_centers_ = box_centers[4 *", "[] self._prior_cfg = {} self._clip = cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE", "[list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) p1", "knowing feature map size\"\"\" assert self._prior_cfg is not {} return", "self._flip = cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self, cx, cy, k): #", "__name__ == '__main__': import copy # from lib.datasets.config import ssd_voc_vgg", "/ 3], [2, 1 / 2], [2, 1 / 2]]", "2, bboxs[:, :2] + bboxs[:, 2:4] / 2)) * scale", "as sqrt from itertools import product as product import torch", "= [] for i, j in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x =", "= True feat_dim = [list(a) for a in zip(cfg['feature_maps'], 
cfg['feature_maps'])]", "# one max size per layer s_i_prime = sqrt(s_i *", "162, 213, 264] cfg['flip'] = True feat_dim = [list(a) for", "+ 0.5) / steps_y prior += self._create_prior(cx, cy, k) priors", "input\"\"\" def wrapper(*args, **kw): return func(*args, **kw) if kw['tb_writer'] is", "5, 3, 1] cfg['min_sizes'] = [[30], [60], 111, 162, 213,", "ymin, xmax, ymax] bboxs = np.hstack((bboxs[:, :2] - bboxs[:, 2:4]", "ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx, cy, s_j * sqrt(ar),", "must be greater than 0') def _setup(self, cfg): num_feat =", "1 / 2], [2, 1 / 2]] p = PriorBox(cfg)", "cx = (j + 0.5) / steps_x # unit center", "= [min_sizes] if not isinstance(min_sizes, list) else min_sizes for ms", "ms / self.image_size[1] prior += [cx, cy, s_j, s_i] #", "-1) if archor[0] == archor[1]: # only show diagnal anchors", "# test_no_vis(cfg, tb_writer) # test_filp(cfg, tb_writer) # test_rectangle(cfg, tb_writer) print('haha')", "# self.image_size = cfg['image_size'] self._cfg_list = ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip", "tb_writer=tb_writer) # print(p2) assert (p2 - p1).sum() < 1e-8 def", "s_j * sqrt(ar), s_i / sqrt(ar)] # a vertical box", "each anchor box on a feature map for prior_idx in", "unit center x,y cy = (i + 0.5) / steps_y", "NotImplementedError @vis def _image_proc(self, image=None, tb_writer=None): # TODO test with", "print(p2) assert (p2 - p1).sum() < 1e-8 def test_rectangle(cfg, tb_writer):", "[min_sizes] if not isinstance(min_sizes, list) else min_sizes for ms in", "= cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE for v in self._variance: if", "3, 1] cfg['flip'] = True feat_dim = [list(a) for a", "# min square s_i = ms / self.image_size[0] s_j =", "= tb_writer.writer prior_num = self.num_priors[feat_idx] # transform coordinates scale =", "def __init__(self, cfg): super(PriorBoxBase, self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE self._steps =", "- p1).sum() < 1e-8 def test_rectangle(cfg, tb_writer): cfg = 
copy.deepcopy(cfg)", "= image[..., ::-1] image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2)", "{'epoch': 50}) # # test_no_vis(cfg, tb_writer) # test_filp(cfg, tb_writer) #", "map. \"\"\" def __init__(self, cfg): super(PriorBoxBase, self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE", "PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if __name__ == '__main__':", "# visualize each anchor box on a feature map for", "import TBWriter def vis(func): \"\"\"tensorboard visualization if has writer as", "+= [cx, cy, s_j, s_i] # min max square if", "bboxs[:, :2] * scale[:2] # [x, y] # bboxs: [xmin,", "True # feat_dim = [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])]", "cy, k): raise NotImplementedError @vis def _image_proc(self, image=None, tb_writer=None): #", "= [38, 19, 10, 5, 3, 1] cfg['min_sizes'] = [[30],", "coordinates scale = [self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]] bboxs = np.array(anchor).reshape((-1,", "anchor, image_ori, feat_idx, tb_writer=None): # TODO add output path to", "for k in range(len(layer_dims)): prior = [] for i, j", "cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] = [600, 300] p = PriorBoxSSD(cfg) p1", "cfg['min_sizes'] = [[30], [60], 111, 162, 213, 264] cfg['flip'] =", "tb_writer = TBWriter(log_dir, {'epoch': 50}) # # test_no_vis(cfg, tb_writer) #", "3, 1] cfg['min_sizes'] = [[30], [60], 111, 162, 213, 264]", "calculation before knowing feature map size\"\"\" assert self._prior_cfg is not", "0.5) / steps_x # unit center x,y cy = (i", "cy, s_j_prime, s_i_prime] # rectangles by min and aspect ratio", "feat_dim = [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p =", "elif isinstance(image, str): image = cv2.imread(image, -1) image = cv2.resize(image,", "bboxs_ = bboxs[prior_idx::prior_num, :] box_centers_ = box_centers[4 * prior_idx::prior_num, :]", 
"range(layer_dims[k][1])): steps_x = self.image_size[1] / self._steps[k] steps_y = self.image_size[0] /", "2:4] / 2)) * scale box_centers = box_centers.astype(np.int32) bboxs =", "in self._variance: if v <= 0: raise ValueError('Variances must be", "self.image_size[1])) prior += [cx, cy, s_j_prime, s_i_prime] # rectangles by", "# cfg['image_size'] = [300, 600] feat_dim = [list(a) for a", "300] # feat_dim = [list(a) for a in zip(cfg['feature_maps'], [item", "= box_centers[4 * prior_idx::prior_num, :] for archor, bbox in zip(box_centers_,", "clip is meanless, should clip on [xmin, ymin, xmax, ymax]", "3, 1] cfg['min_sizes'] = [30, 60, 111, 162, 213, 264]", "def __init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg) # self.image_size = cfg['image_size'] self._cfg_list", "\"\"\"allow prior num calculation before knowing feature map size\"\"\" assert", "archor[0] == archor[1]: # only show diagnal anchors cv2.rectangle(image, (bbox[0],", "/ self._steps[k] cx = (j + 0.5) / steps_x #", "if item not in cfg.MODEL: raise Exception(\"wrong anchor config!\") if", "(p2 - p1).sum() < 1e-8 def test_rectangle(cfg, tb_writer): cfg =", "x,y cy = (i + 0.5) / steps_y prior +=", "_create_prior(self, cx, cy, k): raise NotImplementedError @vis def _image_proc(self, image=None,", "zip(cfg['feature_maps'], cfg['feature_maps'])] p = PriorBoxSSD(cfg) print(p.num_priors) p1 = p.forward(feat_dim) print(p1)", "{} return [int(len(self._create_prior(0, 0, k)) / 4) for k in", "1] cfg['flip'] = True feat_dim = [list(a) for a in", "steps_x = self.image_size[1] / self._steps[k] steps_y = self.image_size[0] / self._steps[k]", "steps_y = self.image_size[0] / self._steps[k] cx = (j + 0.5)", "writer = tb_writer.writer prior_num = self.num_priors[feat_idx] # transform coordinates scale", ":2] - bboxs[:, 2:4] / 2, bboxs[:, :2] + bboxs[:,", "+= [cx, cy, s_j / sqrt(ar), s_i * sqrt(ar)] return", "anchor config!\") if len(cfg.MODEL[item]) != num_feat and len(cfg.MODEL[item]) != 0:", "[] min_sizes = 
self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes] if not isinstance(min_sizes,", "/ 2], [2, 1 / 2]] p = PriorBox(cfg) p2", "10, 5, 3, 1] cfg['min_sizes'] = [30, 60, 111, 162,", "a in zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size'] = [300, 300] #", "raise Exception(\"wrong anchor config!\") if len(cfg.MODEL[item]) != num_feat and len(cfg.MODEL[item])", "= [] image = self._image_proc(image=image, tb_writer=tb_writer) for k in range(len(layer_dims)):", "box on a feature map for prior_idx in range(prior_num): image", "k)) / 4) for k in range(len(self._steps))] def _create_prior(self, cx,", "\"\"\"Compute priorbox coordinates in center-offset form for each source feature", "j in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x = self.image_size[1] / self._steps[k] steps_y", "assert type(self._prior_cfg['MAX_SIZES'][k]) is not list # one max size per", "= True # feat_dim = [list(a) for a in zip(cfg['feature_maps'],", "ms / self.image_size[0] s_j = ms / self.image_size[1] prior +=", "if __name__ == '__main__': import copy # from lib.datasets.config import", "/ sqrt(ar)] # a vertical box if self._flip: prior +=", "[int(len(self._create_prior(0, 0, k)) / 4) for k in range(len(self._steps))] def", "-1) image = cv2.resize(image, (self.image_size[1], self.image_size[0])) return image @vis def", "# PriorBox = PriorBoxSSD def test_no_vis(cfg, tb_writer): cfg = copy.deepcopy(cfg)", "import copy # from lib.datasets.config import ssd_voc_vgg as cfg #", "+= prior self._prior_vis(prior, image, k, tb_writer=tb_writer) output = torch.Tensor(priors).view(-1, 4)", "feat_idx, tb_writer=None): # TODO add output path to the signature", "prior = [] for i, j in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x", "PriorBoxBase(object): \"\"\"Compute priorbox coordinates in center-offset form for each source", "in zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size'] = [300, 300] # feat_dim", "[300, 600] feat_dim = 
[list(a) for a in zip([item *", "= self._image_proc(image=image, tb_writer=tb_writer) for k in range(len(layer_dims)): prior = []", "cfg['feature_maps'])] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] =", "greater than 0') def _setup(self, cfg): num_feat = len(self._steps) for", "len(cfg.MODEL[item]) != num_feat and len(cfg.MODEL[item]) != 0: raise Exception(\"config {}", "not match step length!\".format(item)) self._prior_cfg[item] = cfg.MODEL[item] @property def num_priors(self):", "# TODO test with image if isinstance(image, type(None)): image =", "= self.image_size[1] / self._steps[k] steps_y = self.image_size[0] / self._steps[k] cx", "cfg['min_sizes'] = [30, 60, 111, 162, 213, 264] cfg['flip'] =", "k): raise NotImplementedError @vis def _image_proc(self, image=None, tb_writer=None): # TODO", "cfg.MODEL: raise Exception(\"wrong anchor config!\") if len(cfg.MODEL[item]) != num_feat and", "bboxs = np.array(anchor).reshape((-1, 4)) box_centers = bboxs[:, :2] * scale[:2]", "k in range(len(layer_dims)): prior = [] for i, j in", "True feat_dim = [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] p", "y] # bboxs: [xmin, ymin, xmax, ymax] bboxs = np.hstack((bboxs[:,", "= cv2.imread(image, -1) image = cv2.resize(image, (self.image_size[1], self.image_size[0])) return image", "self._prior_cfg = {} self._clip = cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE for", "list) else min_sizes for ms in min_sizes: # min square", "_image_proc(self, image=None, tb_writer=None): # TODO test with image if isinstance(image,", "= [list(a) for a in zip([item * 2 for item", "= (j + 0.5) / steps_x # unit center x,y", "* sqrt(ar)] return prior # PriorBox = PriorBoxSSD def test_no_vis(cfg,", "0.5) / steps_y prior += self._create_prior(cx, cy, k) priors +=", "prior += [cx, cy, s_j * sqrt(ar), s_i / sqrt(ar)]", "213, 264] cfg['flip'] = True # feat_dim = [list(a) for", "import ssd_voc_vgg as cfg # from lib.utils.visualize_utils import TBWriter 
#", "= [self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]] bboxs = np.array(anchor).reshape((-1, 4)) box_centers", "image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2) def forward(self, layer_dims, tb_writer=None, image=None):", "= [] self._prior_cfg = {} self._clip = cfg.MODEL.CLIP self._variance =", "if len(self._prior_cfg['MAX_SIZES']) != 0: assert type(self._prior_cfg['MAX_SIZES'][k]) is not list #", "tb_writer=None): # TODO add output path to the signature writer", "cfg['flip'] = False cfg['aspect_ratios'] = [[2, 1 / 2], [2,", "bboxs[:, 2:4] / 2)) * scale box_centers = box_centers.astype(np.int32) bboxs", "s_j / sqrt(ar), s_i * sqrt(ar)] return prior # PriorBox", "(self.image_size[1], self.image_size[0])) return image @vis def _prior_vis(self, anchor, image_ori, feat_idx,", "sqrt(ar)] # a vertical box if self._flip: prior += [cx,", "1 / 2, 3, 1 / 3], [2, 1 /", "0), 1) image = image[..., ::-1] image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx,", "= PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] = False cfg['aspect_ratios']", "264] cfg['flip'] = True feat_dim = [list(a) for a in", "bbox[1]), (bbox[2], bbox[3]), (0, 255, 0), 1) image = image[...,", "prior += self._create_prior(cx, cy, k) priors += prior self._prior_vis(prior, image,", "= p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if __name__ == '__main__': import copy", "1 / 3], [2, 1 / 2], [2, 1 /", "min max square if len(self._prior_cfg['MAX_SIZES']) != 0: assert type(self._prior_cfg['MAX_SIZES'][k]) is", "priors += prior self._prior_vis(prior, image, k, tb_writer=tb_writer) output = torch.Tensor(priors).view(-1,", "for ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx, cy, s_j *", "transform coordinates scale = [self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]] bboxs =", "return prior # PriorBox = PriorBoxSSD 
def test_no_vis(cfg, tb_writer): cfg", "super(PriorBoxBase, self).__init__() self.image_size = cfg.MODEL.IMAGE_SIZE self._steps = cfg.MODEL.STEPS self._cfg_list =", "is not {} return [int(len(self._create_prior(0, 0, k)) / 4) for", "s_i * sqrt(ar)] return prior # PriorBox = PriorBoxSSD def", "cfg['image_size'] = [600, 300] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim,", "0, 255), -1) if archor[0] == archor[1]: # only show", "sqrt as sqrt from itertools import product as product import", "[] image = self._image_proc(image=image, tb_writer=tb_writer) for k in range(len(layer_dims)): prior", "from __future__ import division from math import sqrt as sqrt", "= self.num_priors[feat_idx] # transform coordinates scale = [self.image_size[1], self.image_size[0], self.image_size[1],", "to the signature writer = tb_writer.writer prior_num = self.num_priors[feat_idx] #", "in range(len(layer_dims)): prior = [] for i, j in product(range(layer_dims[k][0]),", "cy, s_j, s_i] # min max square if len(self._prior_cfg['MAX_SIZES']) !=", "wrapper(*args, **kw): return func(*args, **kw) if kw['tb_writer'] is not None", "[self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]] bboxs = np.array(anchor).reshape((-1, 4)) box_centers =", "def _create_prior(self, cx, cy, k): raise NotImplementedError @vis def _image_proc(self,", "min_sizes: # min square s_i = ms / self.image_size[0] s_j", "sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime = sqrt(s_j * (self._prior_cfg['MAX_SIZES'][k]", "# a vertical box if self._flip: prior += [cx, cy,", "is not None else None return wrapper class PriorBoxBase(object): \"\"\"Compute", "k in range(len(self._steps))] def _create_prior(self, cx, cy, k): raise NotImplementedError", "[38, 19, 10, 5, 3, 1] cfg['min_sizes'] = [30, 60,", "in product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x = self.image_size[1] / self._steps[k] steps_y =", "k, tb_writer=tb_writer) output = torch.Tensor(priors).view(-1, 
4) # TODO this clip", "# feat_dim = [list(a) for a in zip(cfg['feature_maps'], [item *", "p.forward(feat_dim, tb_writer=tb_writer) # print(p2) assert (p2 - p1).sum() < 1e-8", "1] cfg['min_sizes'] = [30, 60, 111, 162, 213, 264] cfg['flip']", "# only show diagnal anchors cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2], bbox[3]),", "cy, s_j / sqrt(ar), s_i * sqrt(ar)] return prior #", "2 for item in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] = [600, 300]", "center-offset form for each source feature map. \"\"\" def __init__(self,", "numpy as np import cv2 from lib.utils.visualize_utils import TBWriter def", "item in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] = [600, 300] p =", "[38, 19, 10, 5, 3, 1] cfg['min_sizes'] = [[30], [60],", "scale = [self.image_size[1], self.image_size[0], self.image_size[1], self.image_size[0]] bboxs = np.array(anchor).reshape((-1, 4))", "# unit center x,y cy = (i + 0.5) /", "= torch.Tensor(priors).view(-1, 4) # TODO this clip is meanless, should", "@vis def _prior_vis(self, anchor, image_ori, feat_idx, tb_writer=None): # TODO add", "(bbox[0], bbox[1]), (bbox[2], bbox[3]), (0, 255, 0), 1) image =", "cfg.MODEL.CLIP self._variance = cfg.MODEL.VARIANCE for v in self._variance: if v", "for prior_idx in range(prior_num): image = image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num,", "tb_writer=None, image=None): priors = [] image = self._image_proc(image=image, tb_writer=tb_writer) for", "# TODO add output path to the signature writer =", "= (i + 0.5) / steps_y prior += self._create_prior(cx, cy,", "archor, bbox in zip(box_centers_, bboxs_): cv2.circle(image, (archor[0], archor[1]), 1, (0,", "lib.utils.visualize_utils import TBWriter def vis(func): \"\"\"tensorboard visualization if has writer", "tb_writer.writer prior_num = self.num_priors[feat_idx] # transform coordinates scale = [self.image_size[1],", "cfg # from lib.utils.visualize_utils import TBWriter # tb_writer = TBWriter(log_dir,", "source feature 
map. \"\"\" def __init__(self, cfg): super(PriorBoxBase, self).__init__() self.image_size", "= p.forward(feat_dim, tb_writer=tb_writer) cfg['flip'] = False cfg['aspect_ratios'] = [[2, 1", "'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP self._setup(cfg) def _create_prior(self, cx, cy, k):", "else None return wrapper class PriorBoxBase(object): \"\"\"Compute priorbox coordinates in", "= bboxs[prior_idx::prior_num, :] box_centers_ = box_centers[4 * prior_idx::prior_num, :] for", "self._steps[k] steps_y = self.image_size[0] / self._steps[k] cx = (j +", "vertical box if self._flip: prior += [cx, cy, s_j /", "feature map size\"\"\" assert self._prior_cfg is not {} return [int(len(self._create_prior(0,", "layer_dims, tb_writer=None, image=None): priors = [] image = self._image_proc(image=image, tb_writer=tb_writer)", "square if len(self._prior_cfg['MAX_SIZES']) != 0: assert type(self._prior_cfg['MAX_SIZES'][k]) is not list", "prior num calculation before knowing feature map size\"\"\" assert self._prior_cfg", "tb_writer=tb_writer) for k in range(len(layer_dims)): prior = [] for i,", "s_i_prime = sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime = sqrt(s_j", "2], [2, 1 / 2]] p = PriorBox(cfg) p2 =", "max square if len(self._prior_cfg['MAX_SIZES']) != 0: assert type(self._prior_cfg['MAX_SIZES'][k]) is not", "box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32) # visualize each anchor box on", "for each source feature map. 
\"\"\" def __init__(self, cfg): super(PriorBoxBase,", "TODO test with image if isinstance(image, type(None)): image = np.ones((self.image_size[1],", "in cfg['feature_maps']])] # cfg['image_size'] = [300, 600] feat_dim = [list(a)", "prior_idx in range(prior_num): image = image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num, :]", "+ bboxs[:, 2:4] / 2)) * scale box_centers = box_centers.astype(np.int32)", "ms in min_sizes: # min square s_i = ms /", "/ self.image_size[1] prior += [cx, cy, s_j, s_i] # min", "writer as input\"\"\" def wrapper(*args, **kw): return func(*args, **kw) if", "# bboxs: [xmin, ymin, xmax, ymax] bboxs = np.hstack((bboxs[:, :2]", "= [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] # cfg['image_size'] =", "priorbox coordinates in center-offset form for each source feature map.", "self._prior_cfg is not {} return [int(len(self._create_prior(0, 0, k)) / 4)", "[cx, cy, s_j_prime, s_i_prime] # rectangles by min and aspect", "cfg['feature_maps'])] p = PriorBoxSSD(cfg) print(p.num_priors) p1 = p.forward(feat_dim) print(p1) def", "a feature map for prior_idx in range(prior_num): image = image_ori.copy()", "super(PriorBoxSSD, self).__init__(cfg) # self.image_size = cfg['image_size'] self._cfg_list = ['MIN_SIZES', 'MAX_SIZES',", "self.image_size[0])) return image @vis def _prior_vis(self, anchor, image_ori, feat_idx, tb_writer=None):", "ymax] if self._clip: output.clamp_(max=1, min=0) return output class PriorBoxSSD(PriorBoxBase): def", "PriorBox(cfg) p2 = p.forward(feat_dim, tb_writer=tb_writer) # print(p2) assert (p2 -", "300] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if", "import division from math import sqrt as sqrt from itertools", "zip([item * 2 for item in cfg['feature_maps']], cfg['feature_maps'])] cfg['image_size'] =", "cy, k): # as the original paper do prior =", "return wrapper class PriorBoxBase(object): \"\"\"Compute priorbox coordinates in center-offset form", "self.image_size[1] prior 
+= [cx, cy, s_j, s_i] # min max", "'__main__': import copy # from lib.datasets.config import ssd_voc_vgg as cfg", "# from lib.utils.visualize_utils import TBWriter # tb_writer = TBWriter(log_dir, {'epoch':", "!= num_feat and len(cfg.MODEL[item]) != 0: raise Exception(\"config {} length", "import product as product import torch import numpy as np", "= [300, 300] # feat_dim = [list(a) for a in", "# print(p2) assert (p2 - p1).sum() < 1e-8 def test_rectangle(cfg,", "= [list(a) for a in zip(cfg['feature_maps'], [item * 2 for", "19, 10, 5, 3, 1] cfg['min_sizes'] = [[30], [60], 111,", "# min max square if len(self._prior_cfg['MAX_SIZES']) != 0: assert type(self._prior_cfg['MAX_SIZES'][k])", "(0, 0, 255), -1) if archor[0] == archor[1]: # only", "3)) elif isinstance(image, str): image = cv2.imread(image, -1) image =", "list # one max size per layer s_i_prime = sqrt(s_i", "is meanless, should clip on [xmin, ymin, xmax, ymax] if", "[2, 1 / 2], [2, 1 / 2]] p =", "= PriorBoxSSD(cfg) print(p.num_priors) p1 = p.forward(feat_dim) print(p1) def test_filp(cfg, tb_writer):", "0: assert type(self._prior_cfg['MAX_SIZES'][k]) is not list # one max size", "print(p.num_priors) p1 = p.forward(feat_dim) print(p1) def test_filp(cfg, tb_writer): cfg =", "feature map. 
\"\"\" def __init__(self, cfg): super(PriorBoxBase, self).__init__() self.image_size =", "/ sqrt(ar), s_i * sqrt(ar)] return prior # PriorBox =", "10, 5, 3, 1] cfg['flip'] = True feat_dim = [list(a)", "itertools import product as product import torch import numpy as", "scale box_centers = box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32) # visualize each", "+= [cx, cy, s_j_prime, s_i_prime] # rectangles by min and", "on [xmin, ymin, xmax, ymax] if self._clip: output.clamp_(max=1, min=0) return", ":] box_centers_ = box_centers[4 * prior_idx::prior_num, :] for archor, bbox", "# TODO this clip is meanless, should clip on [xmin,", "/ steps_y prior += self._create_prior(cx, cy, k) priors += prior", "50}) # # test_no_vis(cfg, tb_writer) # test_filp(cfg, tb_writer) # test_rectangle(cfg,", "np import cv2 from lib.utils.visualize_utils import TBWriter def vis(func): \"\"\"tensorboard", "= [] min_sizes = self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes] if not", "torch.Tensor(priors).view(-1, 4) # TODO this clip is meanless, should clip", "/ 2, 3, 1 / 3], [2, 1 / 2],", "= image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2) def forward(self, layer_dims, tb_writer=None,", "test_no_vis(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10,", "raise Exception(\"config {} length does not match step length!\".format(item)) self._prior_cfg[item]", "as np import cv2 from lib.utils.visualize_utils import TBWriter def vis(func):", "from lib.datasets.config import ssd_voc_vgg as cfg # from lib.utils.visualize_utils import", "/ 4) for k in range(len(self._steps))] def _create_prior(self, cx, cy,", "s_j, s_i] # min max square if len(self._prior_cfg['MAX_SIZES']) != 0:", "cfg['flip'] = True feat_dim = [list(a) for a in zip(cfg['feature_maps'],", "product(range(layer_dims[k][0]), range(layer_dims[k][1])): steps_x = self.image_size[1] / self._steps[k] steps_y = self.image_size[0]", "copy # from 
lib.datasets.config import ssd_voc_vgg as cfg # from", "layer s_i_prime = sqrt(s_i * (self._prior_cfg['MAX_SIZES'][k] / self.image_size[0])) s_j_prime =", "and aspect ratio for ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx,", "cfg['feature_maps']])] # cfg['image_size'] = [300, 600] feat_dim = [list(a) for", "prior_num = self.num_priors[feat_idx] # transform coordinates scale = [self.image_size[1], self.image_size[0],", "writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2) def forward(self, layer_dims, tb_writer=None, image=None): priors", "in min_sizes: # min square s_i = ms / self.image_size[0]", "(archor[0], archor[1]), 1, (0, 0, 255), -1) if archor[0] ==", "item in cfg['feature_maps']])] # cfg['image_size'] = [300, 600] feat_dim =", "wrapper class PriorBoxBase(object): \"\"\"Compute priorbox coordinates in center-offset form for", "zip(box_centers_, bboxs_): cv2.circle(image, (archor[0], archor[1]), 1, (0, 0, 255), -1)", "= [30, 60, 111, 162, 213, 264] cfg['flip'] = True", "= cfg.MODEL[item] @property def num_priors(self): \"\"\"allow prior num calculation before", "box_centers_ = box_centers[4 * prior_idx::prior_num, :] for archor, bbox in", "tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps'] = [38, 19, 10, 5,", "bbox in zip(box_centers_, bboxs_): cv2.circle(image, (archor[0], archor[1]), 1, (0, 0,", "add output path to the signature writer = tb_writer.writer prior_num", "+ 0.5) / steps_x # unit center x,y cy =", "return image @vis def _prior_vis(self, anchor, image_ori, feat_idx, tb_writer=None): #", "1] cfg['min_sizes'] = [[30], [60], 111, 162, 213, 264] cfg['flip']", "def num_priors(self): \"\"\"allow prior num calculation before knowing feature map", "not isinstance(min_sizes, list) else min_sizes for ms in min_sizes: #", "paper do prior = [] min_sizes = self._prior_cfg['MIN_SIZES'][k] min_sizes =", "# # test_no_vis(cfg, tb_writer) # test_filp(cfg, tb_writer) # test_rectangle(cfg, tb_writer)", 
"(self._prior_cfg['MAX_SIZES'][k] / self.image_size[1])) prior += [cx, cy, s_j_prime, s_i_prime] #", "if archor[0] == archor[1]: # only show diagnal anchors cv2.rectangle(image,", "<= 0: raise ValueError('Variances must be greater than 0') def", "self._prior_cfg[item] = cfg.MODEL[item] @property def num_priors(self): \"\"\"allow prior num calculation", "__init__(self, cfg): super(PriorBoxSSD, self).__init__(cfg) # self.image_size = cfg['image_size'] self._cfg_list =", "= cfg['image_size'] self._cfg_list = ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP", "PriorBox = PriorBoxSSD def test_no_vis(cfg, tb_writer): cfg = copy.deepcopy(cfg) cfg['feature_maps']", "= PriorBox(cfg) p2 = p.forward(feat_dim, tb_writer=tb_writer) # print(p2) assert (p2", "min_sizes = [min_sizes] if not isinstance(min_sizes, list) else min_sizes for", "min and aspect ratio for ar in self._prior_cfg['ASPECT_RATIOS'][k]: prior +=", "= cv2.resize(image, (self.image_size[1], self.image_size[0])) return image @vis def _prior_vis(self, anchor,", "func(*args, **kw) if kw['tb_writer'] is not None else None return", "3], [2, 1 / 2, 3, 1 / 3], [2,", "# feat_dim = [list(a) for a in zip(cfg['feature_maps'], cfg['feature_maps'])] #", "steps_x # unit center x,y cy = (i + 0.5)", "import torch import numpy as np import cv2 from lib.utils.visualize_utils", "PriorBoxSSD(cfg) print(p.num_priors) p1 = p.forward(feat_dim) print(p1) def test_filp(cfg, tb_writer): cfg", "image = cv2.imread(image, -1) image = cv2.resize(image, (self.image_size[1], self.image_size[0])) return", "bboxs = np.hstack((bboxs[:, :2] - bboxs[:, 2:4] / 2, bboxs[:,", "prior = [] min_sizes = self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes] if", "cfg['image_size'] self._cfg_list = ['MIN_SIZES', 'MAX_SIZES', 'ASPECT_RATIOS'] self._flip = cfg.MODEL.FLIP self._setup(cfg)", "image = image[..., ::-1] image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image,", 
"@property def num_priors(self): \"\"\"allow prior num calculation before knowing feature", "coordinates in center-offset form for each source feature map. \"\"\"", "clip on [xmin, ymin, xmax, ymax] if self._clip: output.clamp_(max=1, min=0)", "image[..., ::-1] image = image.transpose((2,0,1)) writer.add_image('base/feature_map_{}_{}'.format(feat_idx, prior_idx), image, 2) def", "raise ValueError('Variances must be greater than 0') def _setup(self, cfg):", "in self._prior_cfg['ASPECT_RATIOS'][k]: prior += [cx, cy, s_j * sqrt(ar), s_i", "cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (0, 255, 0), 1) image", "self._clip: output.clamp_(max=1, min=0) return output class PriorBoxSSD(PriorBoxBase): def __init__(self, cfg):", "[600, 300] p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape)", "self._create_prior(cx, cy, k) priors += prior self._prior_vis(prior, image, k, tb_writer=tb_writer)", "cfg['feature_maps'])] # cfg['image_size'] = [300, 300] # feat_dim = [list(a)", "for item in self._cfg_list: if item not in cfg.MODEL: raise", "test_filp(cfg, tb_writer) # test_rectangle(cfg, tb_writer) print('haha') from lib.utils.config import cfg", "[30, 60, 111, 162, 213, 264] cfg['flip'] = True #", "not in cfg.MODEL: raise Exception(\"wrong anchor config!\") if len(cfg.MODEL[item]) !=", "s_i] # min max square if len(self._prior_cfg['MAX_SIZES']) != 0: assert", "bboxs: [xmin, ymin, xmax, ymax] bboxs = np.hstack((bboxs[:, :2] -", "19, 10, 5, 3, 1] cfg['flip'] = True feat_dim =", "cfg['flip'] = True # feat_dim = [list(a) for a in", "len(self._prior_cfg['MAX_SIZES']) != 0: assert type(self._prior_cfg['MAX_SIZES'][k]) is not list # one", "in self._cfg_list: if item not in cfg.MODEL: raise Exception(\"wrong anchor", "p = PriorBoxSSD(cfg) p1 = p.forward(feat_dim, tb_writer=tb_writer) print(p1.shape) if __name__", "xmax, ymax] if self._clip: output.clamp_(max=1, min=0) return output class PriorBoxSSD(PriorBoxBase):", 
"self._prior_cfg['MIN_SIZES'][k] min_sizes = [min_sizes] if not isinstance(min_sizes, list) else min_sizes", "60, 111, 162, 213, 264] cfg['flip'] = True # feat_dim", "19, 10, 5, 3, 1] cfg['min_sizes'] = [30, 60, 111,", "prior += [cx, cy, s_j, s_i] # min max square", "in range(prior_num): image = image_ori.copy() bboxs_ = bboxs[prior_idx::prior_num, :] box_centers_", "self.image_size[1] / self._steps[k] steps_y = self.image_size[0] / self._steps[k] cx =", "[xmin, ymin, xmax, ymax] if self._clip: output.clamp_(max=1, min=0) return output", "/ 3], [2, 1 / 2, 3, 1 / 3],", "/ steps_x # unit center x,y cy = (i +", "archor[1]: # only show diagnal anchors cv2.rectangle(image, (bbox[0], bbox[1]), (bbox[2],", "from lib.utils.visualize_utils import TBWriter # tb_writer = TBWriter(log_dir, {'epoch': 50})", "Exception(\"config {} length does not match step length!\".format(item)) self._prior_cfg[item] =", "= [[2, 1 / 2], [2, 1 / 2, 3,", "bboxs[prior_idx::prior_num, :] box_centers_ = box_centers[4 * prior_idx::prior_num, :] for archor,", "2)) * scale box_centers = box_centers.astype(np.int32) bboxs = bboxs.astype(np.int32) #", "5, 3, 1] cfg['flip'] = True feat_dim = [list(a) for", "def _image_proc(self, image=None, tb_writer=None): # TODO test with image if", "if self._clip: output.clamp_(max=1, min=0) return output class PriorBoxSSD(PriorBoxBase): def __init__(self,", "self._variance = cfg.MODEL.VARIANCE for v in self._variance: if v <=" ]
[ "s = str( self ) if IECore.FileSequence.fileNameValidator().match( s ) :", "isinstance( exampleValue, basestring ) : combiner = mostCommon if combiner", "child ) ) else : nonLeafPaths.append( child ) sequences =", "values ) / len( values ) def mostCommon( values )", "path ) : result = self.__basePathSeed.copy() if isinstance( path, basestring", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "reproduce the above # copyright notice, this list of conditions", "int, float ) ) : combiner = average elif isinstance(", ") : for key, exampleValue in infos[0].items() : if key", "use the seed for creating base paths whenever we need", ") ) else : nonLeafPaths.append( child ) sequences = IECore.findSequences(", "# # Redistribution and use in source and binary forms,", "Redistributions of source code must retain the above # copyright", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "value in counter : counter[value] += 1 else : counter[value]", "def average( values ) : return sum( values ) /", "return False return True def isLeaf( self ) : for", "\"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\" : max, \"fileSystem:size\" : sum, }", "+ nonLeafPaths : result.append( SequencePath( self.__basePath( str( path ) ),", "binary form must reproduce the above # copyright notice, this", "minSequenceSize = self.__minSequenceSize, filter = self.getFilter() ) result.setFromPath( self )", "self, path, root=\"/\", minSequenceSize=1, filter=None ) : if not isinstance(", ") : result = self.__basePathSeed.copy() if isinstance( path, basestring )", "the above # copyright notice, this list of conditions and", "key in result : continue combiner = combiners.get( key, None", ": p = self.__basePath( self ) children = p.children() nonLeafPaths", ": values = [ i[key] for i in infos ]", "PARTICULAR # PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE", "self.__basePath( str( path ) ), minSequenceSize=self.__minSequenceSize, filter = self.getFilter() )", "if isinstance( path, basestring ) : result.setFromString( path ) else", "self ) : sequence = None with IECore.IgnoredExceptions( Exception )", "return result def __basePaths( self ) : sequence = None", ": combiner = average elif isinstance( exampleValue, basestring ) :", "[ i[key] for i in infos ] result[key] = combiner(", "documentation and/or other materials provided with # the distribution. #", "DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "if value in counter : counter[value] += 1 else :", "contributors to this software may be used to endorse or", ": leafPathStrings.append( str( child ) ) else : nonLeafPaths.append( child", "if isinstance( exampleValue, ( int, float ) ) : combiner", "leafPathStrings, self.__minSequenceSize ) result = [] for path in sequences", "conditions are # met: # # * Redistributions of source", "Exception ) : sequence = IECore.FileSequence( str( self ) )", "mostCommonValue = value maxCount = count return mostCommonValue combiners =", "above # copyright notice, this list of conditions and the", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #", "isValid( self ) : for p in self.__basePaths() : if", "this list of conditions and the following # disclaimer. #", "1 else : counter[value] = 1 maxCount = 0 mostCommonValue", "THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF", "modification, are permitted provided that the following conditions are #", "in result : continue combiner = combiners.get( key, None )", "sequence : for f in sequence.fileNames() : result.append( self.__basePath( f", "must reproduce the above # copyright notice, this list of", ") # we use the seed for creating base paths", "materials provided with # the distribution. 
# # * Neither", "def __basePaths( self ) : sequence = None with IECore.IgnoredExceptions(", ": result.setFromPath( path ) return result def __basePaths( self )", ") : def __init__( self, path, root=\"/\", minSequenceSize=1, filter=None )", "OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "# copyright notice, this list of conditions and the following", "True def info( self ) : result = Gaffer.Path.info( self", "mostCommon, \"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\" : max,", "result = SequencePath( self.__basePathSeed, minSequenceSize = self.__minSequenceSize, filter = self.getFilter()", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR", "source and binary forms, with or without # modification, are", "result = [] if sequence : for f in sequence.fileNames()", "for path in sequences + nonLeafPaths : result.append( SequencePath( self.__basePath(", "> maxCount : mostCommonValue = value maxCount = count return", "this list of conditions and the following # disclaimer in", "f in sequence.fileNames() : result.append( self.__basePath( f ) ) else", "return result def copy( self ) : result = SequencePath(", "return result def __isSequence( self ) : s = str(", "2012-2013, Image Engine Design Inc. All rights reserved. # #", "in values : if value in counter : counter[value] +=", "and/or other materials provided with # the distribution. 
# #", "IN ANY WAY OUT OF THE USE OF THIS #", "isinstance( path, Gaffer.Path ) : path = Gaffer.FileSystemPath( path, root", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY,", "IECore.IgnoredExceptions( Exception ) : sequence = IECore.FileSequence( str( self )", "maxCount = 0 mostCommonValue = None for value, count in", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF", ": if key in result : continue combiner = combiners.get(", "= self.__basePathSeed.copy() if isinstance( path, basestring ) : result.setFromString( path", "of # any other contributors to this software may be", ": if not isinstance( path, Gaffer.Path ) : path =", ": if not p.isLeaf() : return False return True def", "with # the distribution. # # * Neither the name", "NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE", "Gaffer.Path.__init__( self, path[:], path.root(), filter=filter ) # we use the", "values ) : counter = {} for value in values", "return None def average( values ) : return sum( values", "= IECore.findSequences( leafPathStrings, self.__minSequenceSize ) result = [] for path", "] result[key] = combiner( values ) return result def _children(", "self.__basePaths() : if not p.isLeaf() : return False return True", "NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS;", "result.setFromPath( path ) return result def __basePaths( self ) :", "self.__minSequenceSize, filter = self.getFilter() ) result.setFromPath( self ) return result", "and binary forms, with or without # modification, are permitted", "the following # disclaimer in the documentation and/or other materials", ") return result def __basePaths( self ) : sequence =", "OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "[ path.info() for path in self.__basePaths() ] if len( infos", ": result.append( SequencePath( self.__basePath( str( path ) ), minSequenceSize=self.__minSequenceSize, filter", "DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR #", "# # Copyright (c) 2012-2013, Image Engine Design Inc. All", "and the following # disclaimer in the documentation and/or other", "\"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", ") : combiner = average elif isinstance( exampleValue, basestring )", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE", "= combiner( values ) return result def _children( self )", "or # promote products derived from this software without specific", "THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN", "def isLeaf( self ) : for p in self.__basePaths() :", "list of conditions and the following # disclaimer in the", "Gaffer.Path.info( self ) if result is None : return None", "########################################################################## import IECore import Gaffer class SequencePath( Gaffer.Path ) :", "maxCount = count return mostCommonValue combiners = { \"fileSystem:owner\" :", "= [ path.info() for path in self.__basePaths() ] if len(", "= path self.__minSequenceSize = minSequenceSize def isValid( self ) :", "True def isLeaf( self ) : for p in self.__basePaths()", "max, \"fileSystem:size\" : sum, } infos = [ path.info() for", "NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "not isinstance( path, Gaffer.Path ) : path = Gaffer.FileSystemPath( path,", "for path in self.__basePaths() ] if len( infos ) :", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL", "A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "written permission. 
# # THIS SOFTWARE IS PROVIDED BY THE", "def __init__( self, path, root=\"/\", minSequenceSize=1, filter=None ) : if", "children = p.children() nonLeafPaths = [] leafPathStrings = [] for", "nonLeafPaths = [] leafPathStrings = [] for child in children", "conditions and the following # disclaimer in the documentation and/or", "OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "path, root=\"/\", minSequenceSize=1, filter=None ) : if not isinstance( path,", "in sequence.fileNames() : result.append( self.__basePath( f ) ) else :", "[] for child in children : if child.isLeaf() : leafPathStrings.append(", "continue combiner = combiners.get( key, None ) if combiner is", "Inc. All rights reserved. # # Redistribution and use in", "INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", ") : result.setFromString( path ) else : result.setFromPath( path )", "= p.children() nonLeafPaths = [] leafPathStrings = [] for child", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "(c) 2012-2013, Image Engine Design Inc. All rights reserved. #", ") if IECore.FileSequence.fileNameValidator().match( s ) : return True return False", "count return mostCommonValue combiners = { \"fileSystem:owner\" : mostCommon, \"fileSystem:group\"", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "import Gaffer class SequencePath( Gaffer.Path ) : def __init__( self,", "info( self ) : result = Gaffer.Path.info( self ) if", "prior # written permission. 
# # THIS SOFTWARE IS PROVIDED", ": if not p.isValid() : return False return True def", "sequences = IECore.findSequences( leafPathStrings, self.__minSequenceSize ) result = [] for", "= [] for path in sequences + nonLeafPaths : result.append(", ": def __init__( self, path, root=\"/\", minSequenceSize=1, filter=None ) :", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "1 maxCount = 0 mostCommonValue = None for value, count", "{} for value in values : if value in counter", "# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "None : if isinstance( exampleValue, ( int, float ) )", "may be used to endorse or # promote products derived", "sum( values ) / len( values ) def mostCommon( values", "Gaffer class SequencePath( Gaffer.Path ) : def __init__( self, path,", "name of <NAME> nor the names of # any other", ": result.setFromString( path ) else : result.setFromPath( path ) return", "COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT", ") : if not isinstance( path, Gaffer.Path ) : path", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "(INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "if sequence : for f in sequence.fileNames() : result.append( self.__basePath(", "disclaimer. # # * Redistributions in binary form must reproduce", "= self.getFilter() ) result.setFromPath( self ) return result def __basePath(", "with IECore.IgnoredExceptions( Exception ) : sequence = IECore.FileSequence( str( self", "# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR", "# ########################################################################## import IECore import Gaffer class SequencePath( Gaffer.Path )", "without specific prior # written permission. 
# # THIS SOFTWARE", "p in self.__basePaths() : if not p.isValid() : return False", "child in children : if child.isLeaf() : leafPathStrings.append( str( child", "SequencePath( self.__basePathSeed, minSequenceSize = self.__minSequenceSize, filter = self.getFilter() ) result.setFromPath(", "sequence.fileNames() : result.append( self.__basePath( f ) ) else : result.append(", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, #", "HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR", "isinstance( exampleValue, ( int, float ) ) : combiner =", "other materials provided with # the distribution. # # *", "specific prior # written permission. # # THIS SOFTWARE IS", ") : counter = {} for value in values :", "following # disclaimer in the documentation and/or other materials provided", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED.", "self ) : result = Gaffer.Path.info( self ) if result", "for key, exampleValue in infos[0].items() : if key in result", "result def copy( self ) : result = SequencePath( self.__basePathSeed,", "= [ i[key] for i in infos ] result[key] =", ") return result def __basePath( self, path ) : result", "# promote products derived from this software without specific prior", "sequence = IECore.FileSequence( str( self ) ) result = []", "result = self.__basePathSeed.copy() if isinstance( path, basestring ) : result.setFromString(", "= [] for child in children : if child.isLeaf() :", "OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "def _children( self ) : p = self.__basePath( self )", ": counter[value] = 1 maxCount = 0 mostCommonValue = None", "# IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "Gaffer.FileSystemPath( path, root ) Gaffer.Path.__init__( self, path[:], path.root(), filter=filter )", "minSequenceSize=1, filter=None ) : if not isinstance( path, Gaffer.Path )", "or without # modification, are permitted provided that the following", "Redistribution and use in source and 
binary forms, with or", "self ) : p = self.__basePath( self ) children =", "exampleValue, basestring ) : combiner = mostCommon if combiner is", ") : p = self.__basePath( self ) children = p.children()", "source code must retain the above # copyright notice, this", "if len( infos ) : for key, exampleValue in infos[0].items()", "in infos ] result[key] = combiner( values ) return result", ": if isinstance( exampleValue, ( int, float ) ) :", "provided that the following conditions are # met: # #", "endorse or # promote products derived from this software without", "# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", ") if result is None : return None def average(", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE", "SUCH DAMAGE. # ########################################################################## import IECore import Gaffer class SequencePath(", "= 0 mostCommonValue = None for value, count in counter.items()", "# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "if not p.isValid() : return False return True def isLeaf(", ") result.setFromPath( self ) return result def __basePath( self, path", "USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE", "OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", "rights reserved. 
# # Redistribution and use in source and", "= average elif isinstance( exampleValue, basestring ) : combiner =", "is None : return None def average( values ) :", "need them self.__basePathSeed = path self.__minSequenceSize = minSequenceSize def isValid(", ": combiner = mostCommon if combiner is not None :", "children : if child.isLeaf() : leafPathStrings.append( str( child ) )", "# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "i[key] for i in infos ] result[key] = combiner( values", "that the following conditions are # met: # # *", "# * Neither the name of <NAME> nor the names", "Engine Design Inc. All rights reserved. # # Redistribution and", ": for p in self.__basePaths() : if not p.isValid() :", "count > maxCount : mostCommonValue = value maxCount = count", "infos ] result[key] = combiner( values ) return result def", ") sequences = IECore.findSequences( leafPathStrings, self.__minSequenceSize ) result = []", "average elif isinstance( exampleValue, basestring ) : combiner = mostCommon", "maxCount : mostCommonValue = value maxCount = count return mostCommonValue", "if child.isLeaf() : leafPathStrings.append( str( child ) ) else :", "* Redistributions in binary form must reproduce the above #", ") else : result.append( self.__basePath( self ) ) return result", "# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "minSequenceSize=self.__minSequenceSize, filter = self.getFilter() ) ) return result def copy(", "# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "for value in values : if value in counter :", "None : values = [ i[key] for i in infos", "result def __basePath( self, path ) : result = self.__basePathSeed.copy()", ") : s = str( self ) if IECore.FileSequence.fileNameValidator().match( s", ": result = SequencePath( self.__basePathSeed, minSequenceSize = self.__minSequenceSize, filter =", ") return result def copy( self ) : result =", "combiners = { \"fileSystem:owner\" : mostCommon, 
\"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\"", "the following conditions are # met: # # * Redistributions", "result def __isSequence( self ) : s = str( self", "following conditions are # met: # # * Redistributions of", "def info( self ) : result = Gaffer.Path.info( self )", "Design Inc. All rights reserved. # # Redistribution and use", "count in counter.items() : if count > maxCount : mostCommonValue", "in children : if child.isLeaf() : leafPathStrings.append( str( child )", ": sequence = None with IECore.IgnoredExceptions( Exception ) : sequence", "filter=filter ) # we use the seed for creating base", "self.getFilter() ) result.setFromPath( self ) return result def __basePath( self,", "/ len( values ) def mostCommon( values ) : counter", "self.__basePath( self ) ) return result def __isSequence( self )", "result def __basePaths( self ) : sequence = None with", "in source and binary forms, with or without # modification,", "OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "counter : counter[value] += 1 else : counter[value] = 1", "for i in infos ] result[key] = combiner( values )", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "[] for path in sequences + nonLeafPaths : result.append( SequencePath(", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\"", "path.root(), filter=filter ) # we use the seed for creating", "result.setFromString( path ) else : result.setFromPath( path ) return result", "retain the above # copyright notice, this list of conditions", "filter=None ) : if not isinstance( path, Gaffer.Path ) :", "infos = [ path.info() for path in self.__basePaths() ] if", ": mostCommon, \"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\" :", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "return sum( values ) / len( values ) def mostCommon(", "the name of <NAME> nor 
the names of # any", "nonLeafPaths.append( child ) sequences = IECore.findSequences( leafPathStrings, self.__minSequenceSize ) result", "self ) ) return result def __isSequence( self ) :", "use in source and binary forms, with or without #", "i in infos ] result[key] = combiner( values ) return", "this software may be used to endorse or # promote", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE)", "for p in self.__basePaths() : if not p.isValid() : return", "to this software may be used to endorse or #", "following # disclaimer. # # * Redistributions in binary form", ") : combiner = mostCommon if combiner is not None", "disclaimer in the documentation and/or other materials provided with #", "sum, } infos = [ path.info() for path in self.__basePaths()", "p in self.__basePaths() : if not p.isLeaf() : return False", "combiner( values ) return result def _children( self ) :", "_children( self ) : p = self.__basePath( self ) children", "= Gaffer.Path.info( self ) if result is None : return", "Neither the name of <NAME> nor the names of #", "# the distribution. # # * Neither the name of", "= str( self ) if IECore.FileSequence.fileNameValidator().match( s ) : return", "sequence = None with IECore.IgnoredExceptions( Exception ) : sequence =", "permitted provided that the following conditions are # met: #", "WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN", "None def average( values ) : return sum( values )", "Gaffer.Path ) : path = Gaffer.FileSystemPath( path, root ) Gaffer.Path.__init__(", "in self.__basePaths() : if not p.isValid() : return False return", "the documentation and/or other materials provided with # the distribution.", "Image Engine Design Inc. All rights reserved. 
# # Redistribution", ": s = str( self ) if IECore.FileSequence.fileNameValidator().match( s )", "AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT,", "for p in self.__basePaths() : if not p.isLeaf() : return", ") : return sum( values ) / len( values )", "( int, float ) ) : combiner = average elif", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND", "# # * Redistributions of source code must retain the", "# any other contributors to this software may be used", "result = [] for path in sequences + nonLeafPaths :", "POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import IECore import Gaffer", "with or without # modification, are permitted provided that the", "nonLeafPaths : result.append( SequencePath( self.__basePath( str( path ) ), minSequenceSize=self.__minSequenceSize,", ": for f in sequence.fileNames() : result.append( self.__basePath( f )", "mostCommonValue = None for value, count in counter.items() : if", ") ), minSequenceSize=self.__minSequenceSize, filter = self.getFilter() ) ) return result", "False return True def info( self ) : result =", "mostCommon( values ) : counter = {} for value in", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE", "# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "[] if sequence : for f in sequence.fileNames() : result.append(", "values ) def mostCommon( values ) : counter = {}", "key, exampleValue in infos[0].items() : if key in result :", "this software without specific prior # written permission. 
# #", "TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "result.append( SequencePath( self.__basePath( str( path ) ), minSequenceSize=self.__minSequenceSize, filter =", "p.isLeaf() : return False return True def info( self )", "= value maxCount = count return mostCommonValue combiners = {", "if key in result : continue combiner = combiners.get( key,", "values ) return result def _children( self ) : p", "self.__basePathSeed.copy() if isinstance( path, basestring ) : result.setFromString( path )", "\"fileSystem:size\" : sum, } infos = [ path.info() for path", "not p.isValid() : return False return True def isLeaf( self", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "distribution. # # * Neither the name of <NAME> nor", ": result = Gaffer.Path.info( self ) if result is None", "for f in sequence.fileNames() : result.append( self.__basePath( f ) )", "self.__minSequenceSize ) result = [] for path in sequences +", "combiner is not None : values = [ i[key] for", ": mostCommonValue = value maxCount = count return mostCommonValue combiners", "creating base paths whenever we need them self.__basePathSeed = path", "THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY", ") ) result = [] if sequence : for f", "met: # # * Redistributions of source code must retain", "value maxCount = count return mostCommonValue combiners = { \"fileSystem:owner\"", "if combiner is None : if isinstance( exampleValue, ( int,", "used to endorse or # promote products derived from this", "SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR", "= IECore.FileSequence( str( self ) ) result = [] if", "base paths whenever we need them self.__basePathSeed = path self.__minSequenceSize", "# * Redistributions in binary form must reproduce the above", "False return True def isLeaf( self ) : for p", "and use in source and binary forms, with or without", "p.children() nonLeafPaths = [] 
leafPathStrings = [] for child in", "the seed for creating base paths whenever we need them", ") : for p in self.__basePaths() : if not p.isLeaf()", "whenever we need them self.__basePathSeed = path self.__minSequenceSize = minSequenceSize", "= self.__minSequenceSize, filter = self.getFilter() ) result.setFromPath( self ) return", "= count return mostCommonValue combiners = { \"fileSystem:owner\" : mostCommon,", "code must retain the above # copyright notice, this list", "= minSequenceSize def isValid( self ) : for p in", "self, path ) : result = self.__basePathSeed.copy() if isinstance( path,", "values = [ i[key] for i in infos ] result[key]", "None with IECore.IgnoredExceptions( Exception ) : sequence = IECore.FileSequence( str(", "self ) if IECore.FileSequence.fileNameValidator().match( s ) : return True return", "path in sequences + nonLeafPaths : result.append( SequencePath( self.__basePath( str(", ": max, \"fileSystem:size\" : sum, } infos = [ path.info()", "mostCommon if combiner is not None : values = [", "of conditions and the following # disclaimer. # # *", "are # met: # # * Redistributions of source code", "# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import IECore import", "self, path[:], path.root(), filter=filter ) # we use the seed", ": sum, } infos = [ path.info() for path in", "path in self.__basePaths() ] if len( infos ) : for", ") / len( values ) def mostCommon( values ) :", ") : sequence = None with IECore.IgnoredExceptions( Exception ) :", ": if child.isLeaf() : leafPathStrings.append( str( child ) ) else", "promote products derived from this software without specific prior #", "combiner is None : if isinstance( exampleValue, ( int, float", "AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. 
IN", ": path = Gaffer.FileSystemPath( path, root ) Gaffer.Path.__init__( self, path[:],", "= { \"fileSystem:owner\" : mostCommon, \"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\" :", "combiners.get( key, None ) if combiner is None : if", "else : nonLeafPaths.append( child ) sequences = IECore.findSequences( leafPathStrings, self.__minSequenceSize", "combiner = mostCommon if combiner is not None : values", "forms, with or without # modification, are permitted provided that", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR #", "copy( self ) : result = SequencePath( self.__basePathSeed, minSequenceSize =", "TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "binary forms, with or without # modification, are permitted provided", "software may be used to endorse or # promote products", "self.getFilter() ) ) return result def copy( self ) :", "EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE", "is not None : values = [ i[key] for i", ") ) else : result.append( self.__basePath( self ) ) return", "p = self.__basePath( self ) children = p.children() nonLeafPaths =", "SequencePath( Gaffer.Path ) : def __init__( self, path, root=\"/\", minSequenceSize=1,", ") return result def __isSequence( self ) : s =", "self ) if result is None : return None def", "self.__basePaths() ] if len( infos ) : for key, exampleValue", "path = Gaffer.FileSystemPath( path, root ) Gaffer.Path.__init__( self, path[:], path.root(),", "key, None ) if combiner is None : if isinstance(", "# met: # # * Redistributions of source code must", "len( infos ) : for key, exampleValue in infos[0].items() :", "if combiner is not None : values = [ i[key]", "__isSequence( self ) : s = str( self ) if", ") : result = SequencePath( self.__basePathSeed, minSequenceSize = self.__minSequenceSize, filter", "nor the names of # any other contributors to this", "USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", ") else : nonLeafPaths.append( child ) sequences = 
IECore.findSequences( leafPathStrings,", ": mostCommon, \"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\" : max, \"fileSystem:size\" :", "in self.__basePaths() : if not p.isLeaf() : return False return", "= SequencePath( self.__basePathSeed, minSequenceSize = self.__minSequenceSize, filter = self.getFilter() )", ": sequence = IECore.FileSequence( str( self ) ) result =", "we need them self.__basePathSeed = path self.__minSequenceSize = minSequenceSize def", "ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT", "result = Gaffer.Path.info( self ) if result is None :", "TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO", "ARISING IN ANY WAY OUT OF THE USE OF THIS", ") if combiner is None : if isinstance( exampleValue, (", "# written permission. # # THIS SOFTWARE IS PROVIDED BY", "other contributors to this software may be used to endorse", "= 1 maxCount = 0 mostCommonValue = None for value,", "# disclaimer. # # * Redistributions in binary form must", "of <NAME> nor the names of # any other contributors", "mostCommon, \"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\" : max, \"fileSystem:size\" : sum,", ": max, \"fileSystem:accessTime\" : max, \"fileSystem:size\" : sum, } infos", "= self.__basePath( self ) children = p.children() nonLeafPaths = []", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "# Redistribution and use in source and binary forms, with", ": if value in counter : counter[value] += 1 else", "OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import IECore", "result is None : return None def average( values )", ": result.append( self.__basePath( f ) ) else : result.append( self.__basePath(", "average( values ) : return sum( values ) / len(", "from this software without specific prior # written permission. 
#", "them self.__basePathSeed = path self.__minSequenceSize = minSequenceSize def isValid( self", "IECore.FileSequence( str( self ) ) result = [] if sequence", "self ) : result = SequencePath( self.__basePathSeed, minSequenceSize = self.__minSequenceSize,", "THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "if not p.isLeaf() : return False return True def info(", "root=\"/\", minSequenceSize=1, filter=None ) : if not isinstance( path, Gaffer.Path", "and the following # disclaimer. # # * Redistributions in", "Gaffer.Path ) : def __init__( self, path, root=\"/\", minSequenceSize=1, filter=None", "= self.getFilter() ) ) return result def copy( self )", "exampleValue, ( int, float ) ) : combiner = average", ") result = [] if sequence : for f in", "self ) ) result = [] if sequence : for", "* Neither the name of <NAME> nor the names of", ") Gaffer.Path.__init__( self, path[:], path.root(), filter=filter ) # we use", ") ) : combiner = average elif isinstance( exampleValue, basestring", "str( self ) if IECore.FileSequence.fileNameValidator().match( s ) : return True", ": result = self.__basePathSeed.copy() if isinstance( path, basestring ) :", ") else : result.setFromPath( path ) return result def __basePaths(", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR #", ": if count > maxCount : mostCommonValue = value maxCount", "# modification, are permitted provided that the following conditions are", ": return sum( values ) / len( values ) def", "def copy( self ) : result = SequencePath( self.__basePathSeed, minSequenceSize", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR", "None ) if combiner is None : if isinstance( exampleValue,", "# we use the seed for creating base paths whenever", "\"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\" : max, \"fileSystem:size\"", "STRICT LIABILITY, OR TORT (INCLUDING # 
NEGLIGENCE OR OTHERWISE) ARISING", "self.__basePathSeed, minSequenceSize = self.__minSequenceSize, filter = self.getFilter() ) result.setFromPath( self", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY", "OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR", "isinstance( path, basestring ) : result.setFromString( path ) else :", "[] leafPathStrings = [] for child in children : if", ") children = p.children() nonLeafPaths = [] leafPathStrings = []", "return True def isLeaf( self ) : for p in", "basestring ) : combiner = mostCommon if combiner is not", "return result def __basePath( self, path ) : result =", "# # * Redistributions in binary form must reproduce the", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS #", "AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR IMPLIED", "= {} for value in values : if value in", ": counter = {} for value in values : if", "mostCommonValue combiners = { \"fileSystem:owner\" : mostCommon, \"fileSystem:group\" : mostCommon,", ": return False return True def info( self ) :", "None : return None def average( values ) : return", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", ") : path = Gaffer.FileSystemPath( path, root ) Gaffer.Path.__init__( self,", "form must reproduce the above # copyright notice, this list", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "counter = {} for value in values : if value", "minSequenceSize def isValid( self ) : for p in self.__basePaths()", "# disclaimer in the documentation and/or other materials provided with", "else : result.setFromPath( path ) return result def __basePaths( self", "), minSequenceSize=self.__minSequenceSize, filter = self.getFilter() ) ) return result def", "sequences + nonLeafPaths : result.append( SequencePath( self.__basePath( str( path )", "path, basestring ) : result.setFromString( path ) else : result.setFromPath(", "notice, this list of conditions and the following # disclaimer", "DAMAGE. 
# ########################################################################## import IECore import Gaffer class SequencePath( Gaffer.Path", "* Redistributions of source code must retain the above #", "LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "value, count in counter.items() : if count > maxCount :", "notice, this list of conditions and the following # disclaimer.", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED", "# Copyright (c) 2012-2013, Image Engine Design Inc. All rights", "else : counter[value] = 1 maxCount = 0 mostCommonValue =", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "combiner = combiners.get( key, None ) if combiner is None", "else : result.append( self.__basePath( self ) ) return result def", "float ) ) : combiner = average elif isinstance( exampleValue,", "INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF", "None for value, count in counter.items() : if count >", "child.isLeaf() : leafPathStrings.append( str( child ) ) else : nonLeafPaths.append(", "OUT OF THE USE OF THIS # SOFTWARE, EVEN IF", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY,", "if not isinstance( path, Gaffer.Path ) : path = Gaffer.FileSystemPath(", "} infos = [ path.info() for path in self.__basePaths() ]", "basestring ) : result.setFromString( path ) else : result.setFromPath( path", "result.append( self.__basePath( f ) ) else : result.append( self.__basePath( self", "to endorse or # promote products derived from this software", "path self.__minSequenceSize = minSequenceSize def isValid( self ) : for", "return mostCommonValue combiners = { \"fileSystem:owner\" : mostCommon, \"fileSystem:group\" :", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF #", ") : result = Gaffer.Path.info( self ) if result is", "copyright notice, this list of conditions and the following #", "of source code must retain the above # copyright notice,", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE 
IMPLIED WARRANTIES", ": counter[value] += 1 else : counter[value] = 1 maxCount", "\"fileSystem:owner\" : mostCommon, \"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\" : max, \"fileSystem:accessTime\"", "counter[value] = 1 maxCount = 0 mostCommonValue = None for", "= None for value, count in counter.items() : if count", "All rights reserved. # # Redistribution and use in source", "in counter.items() : if count > maxCount : mostCommonValue =", "path, root ) Gaffer.Path.__init__( self, path[:], path.root(), filter=filter ) #", "values : if value in counter : counter[value] += 1", "without # modification, are permitted provided that the following conditions", "provided with # the distribution. # # * Neither the", "child ) sequences = IECore.findSequences( leafPathStrings, self.__minSequenceSize ) result =", "result : continue combiner = combiners.get( key, None ) if", "Copyright (c) 2012-2013, Image Engine Design Inc. All rights reserved.", "= mostCommon if combiner is not None : values =", "__basePath( self, path ) : result = self.__basePathSeed.copy() if isinstance(", "0 mostCommonValue = None for value, count in counter.items() :", "self ) : s = str( self ) if IECore.FileSequence.fileNameValidator().match(", "self.__basePath( f ) ) else : result.append( self.__basePath( self )", "__init__( self, path, root=\"/\", minSequenceSize=1, filter=None ) : if not", "root ) Gaffer.Path.__init__( self, path[:], path.root(), filter=filter ) # we", "leafPathStrings = [] for child in children : if child.isLeaf()", "def __basePath( self, path ) : result = self.__basePathSeed.copy() if", "HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, #", "LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN", "names of # any other contributors to this software may", "the distribution. 
# # * Neither the name of <NAME>", "def mostCommon( values ) : counter = {} for value", "result.setFromPath( self ) return result def __basePath( self, path )", "not p.isLeaf() : return False return True def info( self", "path ) ), minSequenceSize=self.__minSequenceSize, filter = self.getFilter() ) ) return", "self ) return result def __basePath( self, path ) :", "def __isSequence( self ) : s = str( self )", "reserved. # # Redistribution and use in source and binary", ": for p in self.__basePaths() : if not p.isLeaf() :", ": for key, exampleValue in infos[0].items() : if key in", "= combiners.get( key, None ) if combiner is None :", "of conditions and the following # disclaimer in the documentation", "str( path ) ), minSequenceSize=self.__minSequenceSize, filter = self.getFilter() ) )", "for value, count in counter.items() : if count > maxCount", "IECore.findSequences( leafPathStrings, self.__minSequenceSize ) result = [] for path in", "BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY", "ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "path ) else : result.setFromPath( path ) return result def", "LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING #", "import IECore import Gaffer class SequencePath( Gaffer.Path ) : def", "len( values ) def mostCommon( values ) : counter =", "the names of # any other contributors to this software", ") result = [] for path in sequences + nonLeafPaths", "infos[0].items() : if key in result : continue combiner =", "Redistributions in binary form must reproduce the above # copyright", ": return False return True def isLeaf( self ) :", "elif isinstance( exampleValue, basestring ) : combiner = mostCommon if", "class SequencePath( Gaffer.Path ) : def __init__( self, path, root=\"/\",", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "filter = self.getFilter() ) result.setFromPath( self ) return result def", 
"ANY WAY OUT OF THE USE OF THIS # SOFTWARE,", "not None : values = [ i[key] for i in", "str( self ) ) result = [] if sequence :", "exampleValue in infos[0].items() : if key in result : continue", "# * Redistributions of source code must retain the above", "\"fileSystem:accessTime\" : max, \"fileSystem:size\" : sum, } infos = [", "IECore import Gaffer class SequencePath( Gaffer.Path ) : def __init__(", "LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION)", ") ) return result def __isSequence( self ) : s", "path[:], path.root(), filter=filter ) # we use the seed for", "f ) ) else : result.append( self.__basePath( self ) )", "in binary form must reproduce the above # copyright notice,", "in counter : counter[value] += 1 else : counter[value] =", "self.__basePathSeed = path self.__minSequenceSize = minSequenceSize def isValid( self )", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE", "path.info() for path in self.__basePaths() ] if len( infos )", "must retain the above # copyright notice, this list of", "self.__basePath( self ) children = p.children() nonLeafPaths = [] leafPathStrings", "{ \"fileSystem:owner\" : mostCommon, \"fileSystem:group\" : mostCommon, \"fileSystem:modificationTime\" : max,", "OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "we use the seed for creating base paths whenever we", "counter.items() : if count > maxCount : mostCommonValue = value", "] if len( infos ) : for key, exampleValue in", ": continue combiner = combiners.get( key, None ) if combiner", "permission. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "path, Gaffer.Path ) : path = Gaffer.FileSystemPath( path, root )", "are permitted provided that the following conditions are # met:", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE", "self.__minSequenceSize = minSequenceSize def isValid( self ) : for p", "filter = self.getFilter() ) ) return result def copy( self", "return result def _children( self ) : p = self.__basePath(", "any other contributors to this software may be used to", "combiner = average elif isinstance( exampleValue, basestring ) : combiner", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS", ") return result def _children( self ) : p =", "= [] leafPathStrings = [] for child in children :", "########################################################################## # # Copyright (c) 2012-2013, Image Engine Design Inc.", "= None with IECore.IgnoredExceptions( Exception ) : sequence = IECore.FileSequence(", "result.append( self.__basePath( self ) ) return result def __isSequence( self", "leafPathStrings.append( str( child ) ) else : nonLeafPaths.append( child )", "max, \"fileSystem:accessTime\" : max, \"fileSystem:size\" : sum, } infos =", ": nonLeafPaths.append( child ) sequences = IECore.findSequences( leafPathStrings, self.__minSequenceSize )", "in self.__basePaths() ] if len( infos ) : for key,", "in sequences + nonLeafPaths : result.append( SequencePath( self.__basePath( str( path", "counter[value] += 1 else : counter[value] = 1 maxCount =", "self ) : for p in self.__basePaths() : if not", "in infos[0].items() : if key in result : continue combiner", "result[key] = combiner( values ) return result def _children( self", "OF SUCH DAMAGE. 
# ########################################################################## import IECore import Gaffer class", "value in values : if value in counter : counter[value]", ") ) return result def copy( self ) : result", ") def mostCommon( values ) : counter = {} for", "isLeaf( self ) : for p in self.__basePaths() : if", "for creating base paths whenever we need them self.__basePathSeed =", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "self.__basePaths() : if not p.isValid() : return False return True", "= Gaffer.FileSystemPath( path, root ) Gaffer.Path.__init__( self, path[:], path.root(), filter=filter", "SequencePath( self.__basePath( str( path ) ), minSequenceSize=self.__minSequenceSize, filter = self.getFilter()", "derived from this software without specific prior # written permission.", "return False return True def info( self ) : result", "paths whenever we need them self.__basePathSeed = path self.__minSequenceSize =", "software without specific prior # written permission. # # THIS", "PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER", "result def _children( self ) : p = self.__basePath( self", "list of conditions and the following # disclaimer. # #", "seed for creating base paths whenever we need them self.__basePathSeed", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, #", ": return None def average( values ) : return sum(", ") : for p in self.__basePaths() : if not p.isValid()", "OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED", "infos ) : for key, exampleValue in infos[0].items() : if", "for child in children : if child.isLeaf() : leafPathStrings.append( str(", "the following # disclaimer. # # * Redistributions in binary", "be used to endorse or # promote products derived from", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ##########################################################################", "if result is None : return None def average( values", "in the documentation and/or other materials provided with # the", "<NAME> nor the names of # any other contributors to", "# # * Neither the name of <NAME> nor the", "SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS", ": result.append( self.__basePath( self ) ) return result def __isSequence(", "path ) return result def __basePaths( self ) : sequence", "if count > maxCount : mostCommonValue = value maxCount =", "__basePaths( self ) : sequence = None with IECore.IgnoredExceptions( Exception", "self ) children = p.children() nonLeafPaths = [] leafPathStrings =", ") : sequence = IECore.FileSequence( str( self ) ) result", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import", "values ) : return sum( values ) / len( values", "(INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS", "conditions and the following # disclaimer. # # * Redistributions", "p.isValid() : return False return True def isLeaf( self )", "products derived from this software without specific prior # written", "str( child ) ) else : nonLeafPaths.append( child ) sequences", "def isValid( self ) : for p in self.__basePaths() :", "= [] if sequence : for f in sequence.fileNames() :", "+= 1 else : counter[value] = 1 maxCount = 0", "is None : if isinstance( exampleValue, ( int, float )", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS", "return True def info( self ) : result = Gaffer.Path.info(", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR" ]
[ "django.db import migrations class Migration(migrations.Migration): dependencies = [ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'),", "= [ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo', '0121_auto_20211012_0305') ] operations", "from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('reo',", "Django 3.1.13 on 2021-10-01 18:41 from django.db import migrations class", "class Migration(migrations.Migration): dependencies = [ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo',", "dependencies = [ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo', '0121_auto_20211012_0305') ]", "'0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo', '0121_auto_20211012_0305') ] operations = [ ]", "on 2021-10-01 18:41 from django.db import migrations class Migration(migrations.Migration): dependencies", "[ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo', '0121_auto_20211012_0305') ] operations =", "by Django 3.1.13 on 2021-10-01 18:41 from django.db import migrations", "migrations class Migration(migrations.Migration): dependencies = [ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'),", "18:41 from django.db import migrations class Migration(migrations.Migration): dependencies = [", "2021-10-01 18:41 from django.db import migrations class Migration(migrations.Migration): dependencies =", "import migrations class Migration(migrations.Migration): dependencies = [ ('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo',", "3.1.13 on 2021-10-01 18:41 from django.db import migrations class Migration(migrations.Migration):", "Migration(migrations.Migration): dependencies = [ ('reo', 
'0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo', '0121_auto_20211012_0305')", "('reo', '0117_financialmodel_generator_fuel_escalation_pct'), ('reo', '0120_auto_20210927_2046'), ('reo', '0121_auto_20211012_0305') ] operations = [", "# Generated by Django 3.1.13 on 2021-10-01 18:41 from django.db", "Generated by Django 3.1.13 on 2021-10-01 18:41 from django.db import" ]
[ "\"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\")", "alltaus: tau.associatedVertex = event.goodVertices[0] if len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto =", "Analyzer ): def __init__(self, cfg_ana, cfg_comp, looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #----------------------------------------", "l.pt(), reverse = True) event.otherTaus.sort(key = lambda l : l.pt(),", "('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count = self.counters.counter('events') count.register('all", "(very loose, loose, medium, tight, very tight, very very tight)\"\"\"", "event.inclusiveTaus: tau.loose_lepVeto = False if self.cfg_ana.loose_vetoLeptons: for lep in event.selectedLeptons:", "\\ tau.pt() > self.cfg_ana.loose_ptMin and abs(tau.eta()) < self.cfg_ana.loose_etaMax and \\", "= TauAnalyzer, # inclusive very loose hadronic tau selection inclusive_ptMin", "if hasattr(event, 'gentaus'): self.matchTaus(event) return True # Find the definitions", "PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import Tau from PhysicsTools.HeppyCore.utils.deltar import", "event.selectedTaus.sort(key = lambda l : l.pt(), reverse = True) event.otherTaus.sort(key", "True if hasattr(event, 'gentaus'): self.matchTaus(event) return True # Find the", "self).declareHandles() self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events')", "the definitions of the tau ID strings here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py", "cfg_comp, looperName ): 
super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION OF HANDLES OF", "lambda l : l.pt(), reverse = True) event.selectedTaus.sort(key = lambda", "following two IDs are required inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID =", "= 0.4, inclusive_vetoLeptons = False, inclusive_leptonVetoDR = 0.4, inclusive_decayModeID =", "= map( Tau, self.handles['taus'].product() ) #make inclusive taus for tau", "tau.phi()) < self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True if self.cfg_ana.loose_vetoLeptonsPOG: if not", "= False, inclusive_leptonVetoDR = 0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\", # ignored", "lep.genp = gen def process(self, event): self.readCollections( event.input ) self.makeTaus(event)", "IDs are required inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID = \"\", #", "len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected taus') if len(event.otherTaus): self.counters.counter('events').inc('has >=1 other", "setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count = self.counters.counter('events') count.register('all events') count.register('has >=1", "l : l.pt(), reverse = True) self.counters.counter('events').inc('all events') if len(event.inclusiveTaus):", "import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import Tau from PhysicsTools.HeppyCore.utils.deltar import deltaR,", "= AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count =", "# If True, the following two IDs are required inclusive_tauAntiMuonID", "tau at preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected taus') if", "0.2, loose_vetoLeptons = True, loose_leptonVetoDR = 0.4, loose_decayModeID = \"decayModeFindingNewDMs\",", ">=1 other taus') #------------------ # MAKE LEPTON 
LISTS #------------------ def", "super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION OF HANDLES OF LEPTONS STUFF #----------------------------------------", "if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for tau in event.inclusiveTaus: tau.loose_lepVeto = False", "loose, loose, medium, tight, very tight)\"\"\" return id3(tau, X) +", "self.cfg_ana.inclusive_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi())", "= 1 if gen else 0 lep.genp = gen def", "strings here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer, # inclusive", "taus') #------------------ # MAKE LEPTON LISTS #------------------ def makeTaus(self, event):", "tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True", "taus') def matchTaus(self, event): match = matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax =", "MAKE LEPTON LISTS #------------------ def makeTaus(self, event): event.inclusiveTaus = []", "self.counters.addCounter('events') count = self.counters.counter('events') count.register('all events') count.register('has >=1 tau at", "18, loose_etaMax = 9999, loose_dxyMax = 1000., loose_dzMax = 0.2,", "self.matchTaus(event) return True # Find the definitions of the tau", "if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto", "inclusive_dxyMax = 1000., inclusive_dzMax = 0.4, inclusive_vetoLeptons = False, inclusive_leptonVetoDR", "event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda l : l.pt(), reverse", "event.inclusiveTaus: gen = match[lep] 
lep.mcMatchId = 1 if gen else", "medium, tight, very tight, very very tight)\"\"\" return id5(tau, X)", "if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue def", "loose, medium, tight, very tight)\"\"\" return id3(tau, X) + tau.tauID(X%\"VLoose\")", "= 0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\", # ignored if not set", "def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count = self.counters.counter('events') count.register('all events')", "tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID and", "integer equal to 1-2-3 for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\")", "self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if", "count = self.counters.counter('events') count.register('all events') count.register('has >=1 tau at preselection')", "tau selection inclusive_ptMin = 18, inclusive_etaMax = 9999, inclusive_dxyMax =", "= \"\", inclusive_tauAntiElectronID = \"\", # loose hadronic tau selection", "tau.idDecayMode = tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID') and", "or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue def id3(tau,X): \"\"\"Create an integer", "\"\"\"Create an integer equal to 1-2-3-4-5-6 for (very loose, loose,", "= \"decayModeFindingNewDMs\", # ignored if not set or \"\" loose_tauID", "for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def id5(tau,X):", "matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as cfg class TauAnalyzer( Analyzer ): def", "very very tight)\"\"\" return id5(tau, 
X) + tau.tauID(X%\"VVTight\") tau.idMVA =", "integer equal to 1-2-3-4-5-6 for (very loose, loose, medium, tight,", "%5.1f: idMVA2 %d, idCI3hit %d, %s, %s\" % (tau.pt(), tau.idMVA2,", ": l.pt(), reverse = True) event.selectedTaus.sort(key = lambda l :", "else 0 lep.genp = gen def process(self, event): self.readCollections( event.input", "matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax = 0.5) for lep in event.inclusiveTaus: gen", "if not set or \"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG =", "= \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False, # If True, the following", "# Find the definitions of the tau ID strings here:", "loose_decayModeID = \"decayModeFindingNewDMs\", # ignored if not set or \"\"", "tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for tau in event.inclusiveTaus: tau.loose_lepVeto =", "event.otherTaus.sort(key = lambda l : l.pt(), reverse = True) self.counters.counter('events').inc('all", "self.counters.counter('events').inc('has >=1 selected taus') if len(event.otherTaus): self.counters.counter('events').inc('has >=1 other taus')", "hasattr(self.cfg_ana, 'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto =", "an integer equal to 1-2-3-4-5 for (very loose, loose, medium,", "LEPTON LISTS #------------------ def makeTaus(self, event): event.inclusiveTaus = [] event.selectedTaus", "inclusive_vetoLeptons = False, inclusive_leptonVetoDR = 0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\", #", "in alltaus: tau.associatedVertex = event.goodVertices[0] if len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto", "\"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False, # If True,", "+ tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") 
tau.idMVANewDM = id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\")", "for (very loose, loose, medium, tight, very tight, very very", "%s, %s\" % (tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID):", "inclusive_leptonVetoDR = 0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\", # ignored if not", "here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer, # inclusive very", "(tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for tau", "= gen def process(self, event): self.readCollections( event.input ) self.makeTaus(event) if", "self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID):", "pt %5.1f: idMVA2 %d, idCI3hit %d, %s, %s\" % (tau.pt(),", "# MAKE LEPTON LISTS #------------------ def makeTaus(self, event): event.inclusiveTaus =", "set or \"\" inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False, #", "tau.eta(), tau.phi()) < self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True if self.cfg_ana.loose_vetoLeptonsPOG: if", "of the tau ID strings here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object", "= matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax = 0.5) for lep in event.inclusiveTaus:", "loose_vetoLeptons = True, loose_leptonVetoDR = 0.4, loose_decayModeID = \"decayModeFindingNewDMs\", #", "> self.cfg_ana.loose_ptMin and abs(tau.eta()) < self.cfg_ana.loose_etaMax and \\ abs(tau.dxy()) <", "< self.cfg_ana.loose_dzMax and \\ 
tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto: event.selectedTaus.append(tau) else:", "not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto =", "tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create an integer", "(loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create", "= 18, loose_etaMax = 9999, loose_dxyMax = 1000., loose_dzMax =", "False tau.idDecayMode = tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID')", "\\ tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key =", "> self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue def id3(tau,X): \"\"\"Create", "\"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau, \"againstElectron%sMVA6\")", "True) event.otherTaus.sort(key = lambda l : l.pt(), reverse = True)", ") self.makeTaus(event) if not self.cfg_comp.isMC: return True if hasattr(event, 'gentaus'):", "tau.lepVeto = False tau.idDecayMode = tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\") if", "lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True if tau.inclusive_lepVeto:", "self.cfg_ana.loose_etaMax and \\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and abs(tau.dz()) < self.cfg_ana.loose_dzMax", "self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: continue if abs(tau.dxy()) >", "id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM = id6(tau, 
\"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\")", "l : l.pt(), reverse = True) event.selectedTaus.sort(key = lambda l", "1-2-3-4-5-6 for (very loose, loose, medium, tight, very tight, very", ": l.pt(), reverse = True) event.otherTaus.sort(key = lambda l :", "tau.pt() < self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: continue if", "if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau at preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has", "from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import Tau from PhysicsTools.HeppyCore.utils.deltar", "self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True if self.cfg_ana.loose_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto", "PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import", "If True, the following two IDs are required inclusive_tauAntiMuonID =", "tau in alltaus: tau.associatedVertex = event.goodVertices[0] if len(event.goodVertices)>0 else event.vertices[0]", "HANDLES OF LEPTONS STUFF #---------------------------------------- def declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus']", "and abs(tau.eta()) < self.cfg_ana.loose_etaMax and \\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and", "to 1-2-3 for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\")", "9999, inclusive_dxyMax = 1000., inclusive_dzMax = 0.4, inclusive_vetoLeptons = False,", "selection loose_ptMin = 18, loose_etaMax = 9999, loose_dxyMax = 1000.,", "two IDs are required loose_tauAntiMuonID = \"againstMuonLoose3\", loose_tauAntiElectronID = \"againstElectronLooseMVA5\"", "lep in 
event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR:", "taus for tau in alltaus: tau.associatedVertex = event.goodVertices[0] if len(event.goodVertices)>0", "self.cfg_comp.isMC: return True if hasattr(event, 'gentaus'): self.matchTaus(event) return True #", "\"Tau pt %5.1f: idMVA2 %d, idCI3hit %d, %s, %s\" %", "reverse = True) event.otherTaus.sort(key = lambda l : l.pt(), reverse", "PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as cfg class TauAnalyzer(", "very loose hadronic tau selection inclusive_ptMin = 18, inclusive_etaMax =", "0 lep.genp = gen def process(self, event): self.readCollections( event.input )", "and self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto = False if", "return True if hasattr(event, 'gentaus'): self.matchTaus(event) return True # Find", "not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True if tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt()", ">=1 selected taus') count.register('has >=1 other taus') #------------------ # MAKE", "equal to 1-2-3 for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") +", "super(TauAnalyzer, self).declareHandles() self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup)", "abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue def id3(tau,X):", "tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for tau in event.inclusiveTaus:", "# ignored if not set or \"\" inclusive_tauID = \"decayModeFindingNewDMs\",", "if len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto = False tau.idDecayMode = 
tau.tauID(\"decayModeFinding\")", "are required inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID = \"\", # loose", "process(self, event): self.readCollections( event.input ) self.makeTaus(event) if not self.cfg_comp.isMC: return", "hadronic tau selection inclusive_ptMin = 18, inclusive_etaMax = 9999, inclusive_dxyMax", "False if self.cfg_ana.inclusive_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(),", "idMVA2 %d, idCI3hit %d, %s, %s\" % (tau.pt(), tau.idMVA2, tau.idCI3hit,", "id6(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5-6 for (very loose,", "if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if", "tau.inclusive_lepVeto = False if self.cfg_ana.inclusive_vetoLeptons: for lep in event.selectedLeptons: if", "= True) event.otherTaus.sort(key = lambda l : l.pt(), reverse =", "id5(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5 for (very loose,", "self.counters.counter('events').inc('has >=1 other taus') def matchTaus(self, event): match = matchObjectCollection3(event.inclusiveTaus,", "self.cfg_ana.loose_dzMax and \\ tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau)", "18, inclusive_etaMax = 9999, inclusive_dxyMax = 1000., inclusive_dzMax = 0.4,", "event.input ) self.makeTaus(event) if not self.cfg_comp.isMC: return True if hasattr(event,", "self.makeTaus(event) if not self.cfg_comp.isMC: return True if hasattr(event, 'gentaus'): self.matchTaus(event)", "tau.idMVANewDM = id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu =", "#print \"Tau pt %5.1f: idMVA2 %d, idCI3hit %d, %s, %s\"", "def id5(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5 for (very", "abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue def id3(tau,X): \"\"\"Create an integer equal", "tau.associatedVertex = 
event.goodVertices[0] if len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto = False", "not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto =", "= lambda l : l.pt(), reverse = True) event.selectedTaus.sort(key =", "len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau at preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has >=1", ">=1 other taus') def matchTaus(self, event): match = matchObjectCollection3(event.inclusiveTaus, event.gentaus,", "\"decayModeFindingNewDMs\", # ignored if not set or \"\" loose_tauID =", "X) + tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM = id6(tau,", "event): self.readCollections( event.input ) self.makeTaus(event) if not self.cfg_comp.isMC: return True", "#------------------ def makeTaus(self, event): event.inclusiveTaus = [] event.selectedTaus = []", "lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True if self.cfg_ana.loose_vetoLeptonsPOG:", "inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID = \"\", # loose hadronic tau", "count.register('has >=1 other taus') #------------------ # MAKE LEPTON LISTS #------------------", "self.readCollections( event.input ) self.makeTaus(event) if not self.cfg_comp.isMC: return True if", "tau.loose_lepVeto = True if self.cfg_ana.loose_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto =", "): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION OF HANDLES OF LEPTONS STUFF", "tau in event.inclusiveTaus: tau.loose_lepVeto = False if self.cfg_ana.loose_vetoLeptons: for lep", "tau.inclusive_lepVeto = True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True if", "self.counters.counter('events').inc('has >=1 tau at 
preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected", "+ tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create an integer equal", "in event.inclusiveTaus: tau.loose_lepVeto = False if self.cfg_ana.loose_vetoLeptons: for lep in", "tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True if tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt() >", "# ignored if not set or \"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\",", "gen else 0 lep.genp = gen def process(self, event): self.readCollections(", "tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for tau in", "% (tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for", "#make inclusive taus for tau in alltaus: tau.associatedVertex = event.goodVertices[0]", "tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5 for", "for tau in event.inclusiveTaus: tau.loose_lepVeto = False if self.cfg_ana.loose_vetoLeptons: for", "\"\", # loose hadronic tau selection loose_ptMin = 18, loose_etaMax", "tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau, \"againstElectron%sMVA6\") #print \"Tau pt %5.1f: idMVA2", "and not tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda l", "DECLARATION OF HANDLES OF LEPTONS STUFF #---------------------------------------- def declareHandles(self): super(TauAnalyzer,", "count.register('has >=1 tau at preselection') count.register('has >=1 selected taus') count.register('has", "setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer, # inclusive very loose hadronic tau", "tau.tauID(self.cfg_ana.inclusive_tauID): 
event.inclusiveTaus.append(tau) for tau in event.inclusiveTaus: tau.loose_lepVeto = False if", "True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True if tau.tauID(self.cfg_ana.loose_decayModeID) and", "LISTS #------------------ def makeTaus(self, event): event.inclusiveTaus = [] event.selectedTaus =", "continue if tau.pt() < self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax:", "= False if self.cfg_ana.loose_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(),", "the following two IDs are required inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID", "reverse = True) self.counters.counter('events').inc('all events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau", "event.vertices[0] tau.lepVeto = False tau.idDecayMode = tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\")", "True) self.counters.counter('events').inc('all events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau at preselection')", "gen def process(self, event): self.readCollections( event.input ) self.makeTaus(event) if not", "OF LEPTONS STUFF #---------------------------------------- def declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus'] =", "= True) self.counters.counter('events').inc('all events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau at", "True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue", "\"\"\"Create an integer equal to 1-2-3 for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\")", "are required loose_tauAntiMuonID = \"againstMuonLoose3\", loose_tauAntiElectronID = \"againstElectronLooseMVA5\" ) )", "if tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): 
tau.inclusive_lepVeto =", "9999, loose_dxyMax = 1000., loose_dzMax = 0.2, loose_vetoLeptons = True,", "selected taus') if len(event.otherTaus): self.counters.counter('events').inc('has >=1 other taus') def matchTaus(self,", "and \\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and abs(tau.dz()) < self.cfg_ana.loose_dzMax and", ") #make inclusive taus for tau in alltaus: tau.associatedVertex =", "for lep in event.inclusiveTaus: gen = match[lep] lep.mcMatchId = 1", "inclusive very loose hadronic tau selection inclusive_ptMin = 18, inclusive_etaMax", "self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto = False if self.cfg_ana.inclusive_vetoLeptons:", "from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau", "hasattr(event, 'gentaus'): self.matchTaus(event) return True # Find the definitions of", "if gen else 0 lep.genp = gen def process(self, event):", "event): event.inclusiveTaus = [] event.selectedTaus = [] event.otherTaus = []", "#------------------ # MAKE LEPTON LISTS #------------------ def makeTaus(self, event): event.inclusiveTaus", "\"\"\"Create an integer equal to 1-2-3-4-5 for (very loose, loose,", "'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto = False", "id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau,", "if tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt() > self.cfg_ana.loose_ptMin and abs(tau.eta()) <", "True) event.selectedTaus.sort(key = lambda l : l.pt(), reverse = True)", "abs(tau.dz()) < self.cfg_ana.loose_dzMax and \\ tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto: event.selectedTaus.append(tau)", "the tau ID strings here: # 
http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object =", "tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5-6 for", "\"\", inclusive_tauAntiElectronID = \"\", # loose hadronic tau selection loose_ptMin", "continue if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: continue if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax", "tau.idAntiE = id5(tau, \"againstElectron%sMVA6\") #print \"Tau pt %5.1f: idMVA2 %d,", "= 9999, inclusive_dxyMax = 1000., inclusive_dzMax = 0.4, inclusive_vetoLeptons =", "tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM = id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit", "deltaRMax = 0.5) for lep in event.inclusiveTaus: gen = match[lep]", "count.register('has >=1 selected taus') count.register('has >=1 other taus') #------------------ #", "%d, %s, %s\" % (tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if", "if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True", "import Tau from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as", "if tau.pt() < self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: continue", "lep in event.inclusiveTaus: gen = match[lep] lep.mcMatchId = 1 if", "self.counters.counter('events') count.register('all events') count.register('has >=1 tau at preselection') count.register('has >=1", "an integer equal to 1-2-3-4-5-6 for (very loose, loose, medium,", "AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count = self.counters.counter('events')", "event.selectedLeptons: if deltaR(lep.eta(), 
lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto =", "class_object = TauAnalyzer, # inclusive very loose hadronic tau selection", "def matchTaus(self, event): match = matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax = 0.5)", "# inclusive very loose hadronic tau selection inclusive_ptMin = 18,", "inclusive_vetoLeptonsPOG = False, # If True, the following two IDs", "class TauAnalyzer( Analyzer ): def __init__(self, cfg_ana, cfg_comp, looperName ):", "loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False, # If True, the", "as cfg class TauAnalyzer( Analyzer ): def __init__(self, cfg_ana, cfg_comp,", "\"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM = id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu", "for tau in alltaus: tau.associatedVertex = event.goodVertices[0] if len(event.goodVertices)>0 else", "else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda l : l.pt(), reverse =", "tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt() > self.cfg_ana.loose_ptMin and abs(tau.eta()) < self.cfg_ana.loose_etaMax", "idCI3hit %d, %s, %s\" % (tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID))", "inclusive_dzMax = 0.4, inclusive_vetoLeptons = False, inclusive_leptonVetoDR = 0.4, inclusive_decayModeID", "= lambda l : l.pt(), reverse = True) self.counters.counter('events').inc('all events')", "IDs are required loose_tauAntiMuonID = \"againstMuonLoose3\", loose_tauAntiElectronID = \"againstElectronLooseMVA5\" )", "loose_leptonVetoDR = 0.4, loose_decayModeID = \"decayModeFindingNewDMs\", # ignored if not", "1-2-3-4-5 for (very loose, loose, medium, tight, very tight)\"\"\" return", "+ tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5-6", "= id5(tau, \"againstElectron%sMVA6\") #print \"Tau pt %5.1f: 
idMVA2 %d, idCI3hit", "= False, # If True, the following two IDs are", "event.otherTaus = [] #get all alltaus = map( Tau, self.handles['taus'].product()", "deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True if", "very tight, very very tight)\"\"\" return id5(tau, X) + tau.tauID(X%\"VVTight\")", "tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if not", "= 0.5) for lep in event.inclusiveTaus: gen = match[lep] lep.mcMatchId", "not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto = False if self.cfg_ana.inclusive_vetoLeptons: for lep", "alltaus = map( Tau, self.handles['taus'].product() ) #make inclusive taus for", "= [] #get all alltaus = map( Tau, self.handles['taus'].product() )", "inclusive_etaMax = 9999, inclusive_dxyMax = 1000., inclusive_dzMax = 0.4, inclusive_vetoLeptons", "if len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected taus') if len(event.otherTaus): self.counters.counter('events').inc('has >=1", "makeTaus(self, event): event.inclusiveTaus = [] event.selectedTaus = [] event.otherTaus =", "return True # Find the definitions of the tau ID", "True, the following two IDs are required loose_tauAntiMuonID = \"againstMuonLoose3\",", "1-2-3 for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def", "if len(event.otherTaus): self.counters.counter('events').inc('has >=1 other taus') def matchTaus(self, event): match", "# loose hadronic tau selection loose_ptMin = 18, loose_etaMax =", "tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau, \"againstElectron%sMVA6\") #print", "= 0.4, loose_decayModeID = \"decayModeFindingNewDMs\", # ignored if not set", "= tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID') and 
self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID):", "STUFF #---------------------------------------- def declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>')", "tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create an integer equal to", "tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau, \"againstElectron%sMVA6\") #print \"Tau pt", "equal to 1-2-3-4-5 for (very loose, loose, medium, tight, very", "reverse = True) event.selectedTaus.sort(key = lambda l : l.pt(), reverse", "tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE", "tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if", "if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto", "map( Tau, self.handles['taus'].product() ) #make inclusive taus for tau in", ">=1 tau at preselection') count.register('has >=1 selected taus') count.register('has >=1", "\"decayModeFindingNewDMs\", # ignored if not set or \"\" inclusive_tauID =", "False if self.cfg_ana.loose_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(),", "False, inclusive_leptonVetoDR = 0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\", # ignored if", "if self.cfg_ana.loose_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(),", "1000., loose_dzMax = 0.2, loose_vetoLeptons = True, loose_leptonVetoDR = 0.4,", "cfg_ana, cfg_comp, looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION OF HANDLES", "PhysicsTools.HeppyCore.framework.config as cfg class 
TauAnalyzer( Analyzer ): def __init__(self, cfg_ana,", "event.goodVertices[0] if len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto = False tau.idDecayMode =", "loose_ptMin = 18, loose_etaMax = 9999, loose_dxyMax = 1000., loose_dzMax", "OF HANDLES OF LEPTONS STUFF #---------------------------------------- def declareHandles(self): super(TauAnalyzer, self).declareHandles()", "\"againstElectron%sMVA6\") #print \"Tau pt %5.1f: idMVA2 %d, idCI3hit %d, %s,", "self.cfg_ana.loose_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi())", "gen = match[lep] lep.mcMatchId = 1 if gen else 0", "import Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import Tau", "not self.cfg_comp.isMC: return True if hasattr(event, 'gentaus'): self.matchTaus(event) return True", "self.cfg_ana.loose_dxyMax and abs(tau.dz()) < self.cfg_ana.loose_dzMax and \\ tau.tauID(self.cfg_ana.loose_tauID) and not", "True # Find the definitions of the tau ID strings", "\"\" inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False, # If True,", "loose_dzMax = 0.2, loose_vetoLeptons = True, loose_leptonVetoDR = 0.4, loose_decayModeID", "= True, loose_leptonVetoDR = 0.4, loose_decayModeID = \"decayModeFindingNewDMs\", # ignored", "True, loose_leptonVetoDR = 0.4, loose_decayModeID = \"decayModeFindingNewDMs\", # ignored if", "def id6(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5-6 for (very", "l.pt(), reverse = True) self.counters.counter('events').inc('all events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1", "(very loose, loose, medium, tight, very tight)\"\"\" return id3(tau, X)", "len(event.otherTaus): self.counters.counter('events').inc('has >=1 other taus') def matchTaus(self, event): match =", "tau selection loose_ptMin = 18, loose_etaMax = 9999, loose_dxyMax =", "False, # If True, the following two IDs are required", 
"id5(tau, X) + tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM =", "< self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True if self.cfg_ana.loose_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID):", "= [] event.otherTaus = [] #get all alltaus = map(", "\"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False, # If True, the following two", "to 1-2-3-4-5 for (very loose, loose, medium, tight, very tight)\"\"\"", "declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup):", "tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto = False if self.cfg_ana.inclusive_vetoLeptons: for lep in", "not set or \"\" inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False,", "preselection') count.register('has >=1 selected taus') count.register('has >=1 other taus') #------------------", "= 9999, loose_dxyMax = 1000., loose_dzMax = 0.2, loose_vetoLeptons =", "from PhysicsTools.Heppy.physicsobjects.Tau import Tau from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3 import", "abs(tau.eta()) < self.cfg_ana.loose_etaMax and \\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and abs(tau.dz())", "equal to 1-2-3-4-5-6 for (very loose, loose, medium, tight, very", "id3(tau, X) + tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create an", "self.cfg_ana.loose_ptMin and abs(tau.eta()) < self.cfg_ana.loose_etaMax and \\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax", "tau.loose_lepVeto = False if self.cfg_ana.loose_vetoLeptons: for lep in event.selectedLeptons: if", "import PhysicsTools.HeppyCore.framework.config as cfg class TauAnalyzer( Analyzer ): def __init__(self,", "# DECLARATION OF HANDLES OF LEPTONS STUFF #---------------------------------------- def declareHandles(self):", 
"http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer, # inclusive very loose hadronic", "if hasattr(self.cfg_ana, 'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto", "lep.mcMatchId = 1 if gen else 0 lep.genp = gen", "True, the following two IDs are required inclusive_tauAntiMuonID = \"\",", "tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue", "len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto = False tau.idDecayMode = tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs", "very tight)\"\"\" return id5(tau, X) + tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau,", "if not self.cfg_comp.isMC: return True if hasattr(event, 'gentaus'): self.matchTaus(event) return", "tight, very very tight)\"\"\" return id5(tau, X) + tau.tauID(X%\"VVTight\") tau.idMVA", "in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto", "\\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and abs(tau.dz()) < self.cfg_ana.loose_dzMax and \\", "not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if tau.pt()", "for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) <", "+ tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create an integer equal", "\"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False, # If True, the following two", "continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True if", "hadronic tau selection loose_ptMin = 18, loose_etaMax = 
9999, loose_dxyMax", "X) + tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create an integer", "#---------------------------------------- def declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def", "event.inclusiveTaus.append(tau) for tau in event.inclusiveTaus: tau.loose_lepVeto = False if self.cfg_ana.loose_vetoLeptons:", "= \"\", # loose hadronic tau selection loose_ptMin = 18,", "+ tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create an integer equal to 1-2-3-4-5", "1000., inclusive_dzMax = 0.4, inclusive_vetoLeptons = False, inclusive_leptonVetoDR = 0.4,", "return id3(tau, X) + tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create", "inclusive_ptMin = 18, inclusive_etaMax = 9999, inclusive_dxyMax = 1000., inclusive_dzMax", "tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID and not", "return id5(tau, X) + tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM", "for (very loose, loose, medium, tight, very tight)\"\"\" return id3(tau,", "other taus') #------------------ # MAKE LEPTON LISTS #------------------ def makeTaus(self,", "[] event.otherTaus = [] #get all alltaus = map( Tau,", "continue tau.inclusive_lepVeto = False if self.cfg_ana.inclusive_vetoLeptons: for lep in event.selectedLeptons:", "at preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected taus') if len(event.otherTaus):", "in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto", "def __init__(self, cfg_ana, cfg_comp, looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION", "self.cfg_ana.loose_vetoLeptonsPOG: if not 
tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID):", "if self.cfg_ana.loose_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True if not", "tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def id6(tau,X): \"\"\"Create an integer equal to", "= self.counters.counter('events') count.register('all events') count.register('has >=1 tau at preselection') count.register('has", "loose_vetoLeptonsPOG = False, # If True, the following two IDs", "self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count", "True if tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto", "< self.cfg_ana.loose_etaMax and \\ abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and abs(tau.dz()) <", "from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as cfg class", "in event.inclusiveTaus: gen = match[lep] lep.mcMatchId = 1 if gen", "beginLoop(self, setup): super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count = self.counters.counter('events') count.register('all events') count.register('has", "and not tau.tauID(self.cfg_ana.inclusive_decayModeID): continue tau.inclusive_lepVeto = False if self.cfg_ana.inclusive_vetoLeptons: for", "following two IDs are required loose_tauAntiMuonID = \"againstMuonLoose3\", loose_tauAntiElectronID =", "[] #get all alltaus = map( Tau, self.handles['taus'].product() ) #make", "= event.goodVertices[0] if len(event.goodVertices)>0 else event.vertices[0] tau.lepVeto = False tau.idDecayMode", "loose_etaMax = 9999, loose_dxyMax = 1000., loose_dzMax = 0.2, loose_vetoLeptons", "= tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = 
tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana, 'inclusive_decayModeID') and self.cfg_ana.inclusive_decayModeID", "Find the definitions of the tau ID strings here: #", "def makeTaus(self, event): event.inclusiveTaus = [] event.selectedTaus = [] event.otherTaus", "loose hadronic tau selection inclusive_ptMin = 18, inclusive_etaMax = 9999,", "True if tau.inclusive_lepVeto: continue if tau.pt() < self.cfg_ana.inclusive_ptMin: continue if", "all alltaus = map( Tau, self.handles['taus'].product() ) #make inclusive taus", "not set or \"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False,", "tight, very tight, very very tight)\"\"\" return id5(tau, X) +", "if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True if tau.tauID(self.cfg_ana.loose_decayModeID) and \\", "taus') count.register('has >=1 other taus') #------------------ # MAKE LEPTON LISTS", "integer equal to 1-2-3-4-5 for (very loose, loose, medium, tight,", "lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.loose_leptonVetoDR:", "lambda l : l.pt(), reverse = True) self.counters.counter('events').inc('all events') if", "l.pt(), reverse = True) event.selectedTaus.sort(key = lambda l : l.pt(),", "if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: continue if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or", "ID strings here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer, #", "very tight)\"\"\" return id3(tau, X) + tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def", "and \\ tau.pt() > self.cfg_ana.loose_ptMin and abs(tau.eta()) < self.cfg_ana.loose_etaMax and", "match = matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax = 0.5) for lep in", "if not set or \"\" inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG =", 
"= id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\")", "tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda l : l.pt(),", "def id3(tau,X): \"\"\"Create an integer equal to 1-2-3 for (loose,medium,tight)\"\"\"", "= False if self.cfg_ana.inclusive_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(),", "super(TauAnalyzer,self).beginLoop(setup) self.counters.addCounter('events') count = self.counters.counter('events') count.register('all events') count.register('has >=1 tau", "tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True", "tight, very tight)\"\"\" return id3(tau, X) + tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\")", "def declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus'] = AutoHandle( ('slimmedTaus',''),'std::vector<pat::Tau>') def beginLoop(self,", "inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False, # If True, the", "#---------------------------------------- # DECLARATION OF HANDLES OF LEPTONS STUFF #---------------------------------------- def", "): def __init__(self, cfg_ana, cfg_comp, looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- #", "abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: continue if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or abs(tau.dz())", "self.counters.counter('events').inc('all events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau at preselection') if", "l : l.pt(), reverse = True) event.otherTaus.sort(key = lambda l", "loose hadronic tau selection loose_ptMin = 18, loose_etaMax = 9999,", "tau.pt() > self.cfg_ana.loose_ptMin and abs(tau.eta()) < 
self.cfg_ana.loose_etaMax and \\ abs(tau.dxy())", "cfg class TauAnalyzer( Analyzer ): def __init__(self, cfg_ana, cfg_comp, looperName", "= False tau.idDecayMode = tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs = tau.tauID(\"decayModeFindingNewDMs\") if hasattr(self.cfg_ana,", "matchTaus(self, event): match = matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax = 0.5) for", "< self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG:", "tau ID strings here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer,", "%d, idCI3hit %d, %s, %s\" % (tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID),", "an integer equal to 1-2-3 for (loose,medium,tight)\"\"\" return tau.tauID(X%\"Loose\") +", "= True if not tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True if tau.inclusive_lepVeto:", "= True if self.cfg_ana.loose_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True", "event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda l : l.pt(), reverse = True)", "self.cfg_ana.inclusive_dzMax: continue def id3(tau,X): \"\"\"Create an integer equal to 1-2-3", "PhysicsTools.Heppy.physicsobjects.Tau import Tau from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config", "or \"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False, # If", "event.selectedTaus = [] event.otherTaus = [] #get all alltaus =", "inclusive_decayModeID = \"decayModeFindingNewDMs\", # ignored if not set or \"\"", "__init__(self, cfg_ana, cfg_comp, looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION OF", "1 
if gen else 0 lep.genp = gen def process(self,", "ignored if not set or \"\" inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG", "deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as cfg class TauAnalyzer( Analyzer ):", "and abs(tau.dz()) < self.cfg_ana.loose_dzMax and \\ tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto:", "abs(tau.dxy()) < self.cfg_ana.loose_dxyMax and abs(tau.dz()) < self.cfg_ana.loose_dzMax and \\ tau.tauID(self.cfg_ana.loose_tauID)", "event): match = matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax = 0.5) for lep", "= 0.2, loose_vetoLeptons = True, loose_leptonVetoDR = 0.4, loose_decayModeID =", "loose_dxyMax = 1000., loose_dzMax = 0.2, loose_vetoLeptons = True, loose_leptonVetoDR", "count.register('all events') count.register('has >=1 tau at preselection') count.register('has >=1 selected", "tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True", "= \"decayModeFindingNewDMs\", # ignored if not set or \"\" inclusive_tauID", "0.5) for lep in event.inclusiveTaus: gen = match[lep] lep.mcMatchId =", "id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") +", "= id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM = id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit = id3(tau,", "[] event.selectedTaus = [] event.otherTaus = [] #get all alltaus", "event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto =", "0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\", # ignored if not set or", "inclusive_tauAntiElectronID = \"\", # loose hadronic tau selection loose_ptMin =", "< self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta()) > self.cfg_ana.inclusive_etaMax: 
continue if abs(tau.dxy())", "LEPTONS STUFF #---------------------------------------- def declareHandles(self): super(TauAnalyzer, self).declareHandles() self.handles['taus'] = AutoHandle(", "= [] event.selectedTaus = [] event.otherTaus = [] #get all", "True if self.cfg_ana.loose_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.loose_tauAntiMuonID): tau.loose_lepVeto = True if", ">=1 tau at preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected taus')", "= match[lep] lep.mcMatchId = 1 if gen else 0 lep.genp", "= lambda l : l.pt(), reverse = True) event.otherTaus.sort(key =", "> self.cfg_ana.inclusive_dzMax: continue def id3(tau,X): \"\"\"Create an integer equal to", "= True if tau.inclusive_lepVeto: continue if self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID):", "inclusive taus for tau in alltaus: tau.associatedVertex = event.goodVertices[0] if", "if deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True", "< self.cfg_ana.loose_dxyMax and abs(tau.dz()) < self.cfg_ana.loose_dzMax and \\ tau.tauID(self.cfg_ana.loose_tauID) and", "Tau, self.handles['taus'].product() ) #make inclusive taus for tau in alltaus:", "id5(tau, \"againstElectron%sMVA6\") #print \"Tau pt %5.1f: idMVA2 %d, idCI3hit %d,", "tau.eta(), tau.phi()) < self.cfg_ana.inclusive_leptonVetoDR: tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue", "import deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as cfg class TauAnalyzer( Analyzer", "if self.cfg_ana.inclusive_vetoLeptons: for lep in event.selectedLeptons: if deltaR(lep.eta(), lep.phi(), tau.eta(),", "self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue def id3(tau,X): \"\"\"Create an", "= 18, inclusive_etaMax = 9999, inclusive_dxyMax = 1000., inclusive_dzMax =", "deltaR(lep.eta(), lep.phi(), tau.eta(), tau.phi()) < 
self.cfg_ana.loose_leptonVetoDR: tau.loose_lepVeto = True if", "= \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False, # If True, the following", "to 1-2-3-4-5-6 for (very loose, loose, medium, tight, very tight,", "= True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True if tau.tauID(self.cfg_ana.loose_decayModeID)", "lambda l : l.pt(), reverse = True) event.otherTaus.sort(key = lambda", "two IDs are required inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID = \"\",", "loose, medium, tight, very tight, very very tight)\"\"\" return id5(tau,", "= True if tau.inclusive_lepVeto: continue if tau.pt() < self.cfg_ana.inclusive_ptMin: continue", "medium, tight, very tight)\"\"\" return id3(tau, X) + tau.tauID(X%\"VLoose\") +", "taus') if len(event.otherTaus): self.counters.counter('events').inc('has >=1 other taus') def matchTaus(self, event):", "event.inclusiveTaus.sort(key = lambda l : l.pt(), reverse = True) event.selectedTaus.sort(key", "0.4, loose_decayModeID = \"decayModeFindingNewDMs\", # ignored if not set or", "ignored if not set or \"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG", "+ tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau, \"againstElectron%sMVA6\") #print \"Tau pt %5.1f:", "required inclusive_tauAntiMuonID = \"\", inclusive_tauAntiElectronID = \"\", # loose hadronic", "and \\ tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key", "preselection') if len(event.selectedTaus): self.counters.counter('events').inc('has >=1 selected taus') if len(event.otherTaus): self.counters.counter('events').inc('has", "return tau.tauID(X%\"Loose\") + tau.tauID(X%\"Medium\") + tau.tauID(X%\"Tight\") def id5(tau,X): \"\"\"Create an", "# If True, the following two IDs are required loose_tauAntiMuonID", "If True, the following two IDs are required 
loose_tauAntiMuonID =", "not tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda l :", "TauAnalyzer( Analyzer ): def __init__(self, cfg_ana, cfg_comp, looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName)", "tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau) for tau in event.inclusiveTaus: tau.loose_lepVeto", "tau at preselection') count.register('has >=1 selected taus') count.register('has >=1 other", ">=1 selected taus') if len(event.otherTaus): self.counters.counter('events').inc('has >=1 other taus') def", "def process(self, event): self.readCollections( event.input ) self.makeTaus(event) if not self.cfg_comp.isMC:", "= True if tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt() > self.cfg_ana.loose_ptMin and", "or \"\" inclusive_tauID = \"decayModeFindingNewDMs\", inclusive_vetoLeptonsPOG = False, # If", "tau.tauID(self.cfg_ana.inclusive_tauAntiElectronID): tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if tau.pt() <", "self.handles['taus'].product() ) #make inclusive taus for tau in alltaus: tau.associatedVertex", "tau.inclusive_lepVeto: continue if tau.pt() < self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta()) >", "0.4, inclusive_vetoLeptons = False, inclusive_leptonVetoDR = 0.4, inclusive_decayModeID = \"decayModeFindingNewDMs\",", "continue def id3(tau,X): \"\"\"Create an integer equal to 1-2-3 for", "match[lep] lep.mcMatchId = 1 if gen else 0 lep.genp =", "tau.inclusive_lepVeto = True if tau.inclusive_lepVeto: continue if tau.pt() < self.cfg_ana.inclusive_ptMin:", "'gentaus'): self.matchTaus(event) return True # Find the definitions of the", "events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has >=1 tau at preselection') if len(event.selectedTaus):", "else event.vertices[0] tau.lepVeto = False tau.idDecayMode = 
tau.tauID(\"decayModeFinding\") tau.idDecayModeNewDMs =", "tau.tauID(self.cfg_ana.loose_tauID) and not tau.loose_lepVeto: event.selectedTaus.append(tau) else: event.otherTaus.append(tau) event.inclusiveTaus.sort(key = lambda", "# http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer( class_object = TauAnalyzer, # inclusive very loose", "= tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE = id5(tau, \"againstElectron%sMVA6\") #print \"Tau", "%s\" % (tau.pt(), tau.idMVA2, tau.idCI3hit, tau.tauID(self.cfg_ana.tauID), tau.tauID(self.cfg_ana.tauLooseID)) if tau.tauID(self.cfg_ana.inclusive_tauID): event.inclusiveTaus.append(tau)", "set or \"\" loose_tauID = \"byLooseCombinedIsolationDeltaBetaCorr3Hits\", loose_vetoLeptonsPOG = False, #", "loose, loose, medium, tight, very tight, very very tight)\"\"\" return", "= 1000., inclusive_dzMax = 0.4, inclusive_vetoLeptons = False, inclusive_leptonVetoDR =", "True if tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt() > self.cfg_ana.loose_ptMin and abs(tau.eta())", "continue if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax: continue", "events') count.register('has >=1 tau at preselection') count.register('has >=1 selected taus')", "id3(tau,X): \"\"\"Create an integer equal to 1-2-3 for (loose,medium,tight)\"\"\" return", "selected taus') count.register('has >=1 other taus') #------------------ # MAKE LEPTON", "Analyzer from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import Tau from", "> self.cfg_ana.inclusive_etaMax: continue if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) >", "TauAnalyzer, # inclusive very loose hadronic tau selection inclusive_ptMin =", "tight)\"\"\" return id3(tau, X) + tau.tauID(X%\"VLoose\") + tau.tauID(X%\"VTight\") def id6(tau,X):", "= True) 
event.selectedTaus.sort(key = lambda l : l.pt(), reverse =", "event.inclusiveTaus = [] event.selectedTaus = [] event.otherTaus = [] #get", "other taus') def matchTaus(self, event): match = matchObjectCollection3(event.inclusiveTaus, event.gentaus, deltaRMax", "if self.cfg_ana.inclusive_vetoLeptonsPOG: if not tau.tauID(self.cfg_ana.inclusive_tauAntiMuonID): tau.inclusive_lepVeto = True if not", "event.gentaus, deltaRMax = 0.5) for lep in event.inclusiveTaus: gen =", "#get all alltaus = map( Tau, self.handles['taus'].product() ) #make inclusive", "AutoHandle from PhysicsTools.Heppy.physicsobjects.Tau import Tau from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3", ": l.pt(), reverse = True) self.counters.counter('events').inc('all events') if len(event.inclusiveTaus): self.counters.counter('events').inc('has", "Tau from PhysicsTools.HeppyCore.utils.deltar import deltaR, matchObjectCollection3 import PhysicsTools.HeppyCore.framework.config as cfg", "= id3(tau, \"by%sCombinedIsolationDeltaBetaCorr3Hits\") tau.idAntiMu = tau.tauID(\"againstMuonLoose3\") + tau.tauID(\"againstMuonTight3\") tau.idAntiE =", "tau.loose_lepVeto = True if tau.tauID(self.cfg_ana.loose_decayModeID) and \\ tau.pt() > self.cfg_ana.loose_ptMin", "self.cfg_ana.inclusive_etaMax: continue if abs(tau.dxy()) > self.cfg_ana.inclusive_dxyMax or abs(tau.dz()) > self.cfg_ana.inclusive_dzMax:", "if tau.inclusive_lepVeto: continue if tau.pt() < self.cfg_ana.inclusive_ptMin: continue if abs(tau.eta())", "= 1000., loose_dzMax = 0.2, loose_vetoLeptons = True, loose_leptonVetoDR =", "tight)\"\"\" return id5(tau, X) + tau.tauID(X%\"VVTight\") tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\")", "looperName ): super(TauAnalyzer,self).__init__(cfg_ana,cfg_comp,looperName) #---------------------------------------- # DECLARATION OF HANDLES OF LEPTONS", "tau.loose_lepVeto = True if not tau.tauID(self.cfg_ana.loose_tauAntiElectronID): tau.loose_lepVeto = True if", "the following two 
IDs are required loose_tauAntiMuonID = \"againstMuonLoose3\", loose_tauAntiElectronID", "at preselection') count.register('has >=1 selected taus') count.register('has >=1 other taus')", "definitions of the tau ID strings here: # http://cmslxr.fnal.gov/lxr/source/PhysicsTools/PatAlgos/python/producersLayer1/tauProducer_cfi.py setattr(TauAnalyzer,\"defaultConfig\",cfg.Analyzer(", "tau.idMVA = id6(tau, \"by%sIsolationMVArun2v1DBoldDMwLT\") tau.idMVANewDM = id6(tau, \"by%sIsolationMVArun2v1DBnewDMwLT\") tau.idCI3hit =", "selection inclusive_ptMin = 18, inclusive_etaMax = 9999, inclusive_dxyMax = 1000.," ]
[ "e - (b // a) * d, d) def modInvert(a,b):", "open(\"plaintext.txt\", \"r\") plain1 = plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0] if plain1", "plain2: print(\"Dosyalar Özdeştir..\") else: print(\"Dosyalar özdeş değildir..\") n = int(input(\"Oluşturulmak", "işlemi yapılamz. Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: privateKeyFile =", "while True: u = random.randrange(1, int(N)) if math.gcd(y, N) ==", "else: privateKeyFile = open(privatekeytxt, \"r\") phi, x, N = privateKeyFile.read().split(\"\\n\")", "return 1 for i in range(s): a = random.randrange(2, p-2)", "random.randrange(1, N) if math.gcd(y, N) == 1: x = pow(y,", "randomInteger(n//2) if RabinMiller(q) and math.gcd(r, int(q - 1)) == 1:", "= 3271 def egcd(a,b): if(a == 0): return(b,0,1) else: c,d,e", "privatekeytxt): try: open(privatekeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan deşifreleme", "def egcd(a,b): if(a == 0): return(b,0,1) else: c,d,e = egcd(b", "% a, a) return(c, e - (b // a) *", "return 1 if not (f & 1): return 0 p", "1: break publicKeyFile = open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) + \"\\n\" +", "math import random r = 3271 def egcd(a,b): if(a ==", "= open(plaintext, \"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True: u", "== 0 and RabinMiller(p) and math.gcd(r, int((p - 1) /", "0 return 1 for i in range(s): a = random.randrange(2,", "* r, f-1) if z == p: return 0 return", "def randomInteger(n): return random.randrange(2 ** (n-1), 2 ** n) |", "open(privatekeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan deşifreleme işlemi yapılamz.", "Keygen fonksiyonunu çalıştırın.\") else: privateKeyFile = open(privatekeytxt, \"r\") phi, x,", "open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt, \"r\")", "+ str(x) + \"\\n\" + str(N)) 
privateKeyFile.close() def encrypt(plaintext, publickeytxt):", "= plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0] if plain1 == plain2: print(\"Dosyalar", "True: u = random.randrange(1, int(N)) if math.gcd(y, N) == 1:", "str(x) + \"\\n\" + str(N)) privateKeyFile.close() def encrypt(plaintext, publickeytxt): try:", "while True: y = random.randrange(1, N) if math.gcd(y, N) ==", "p = f-1 u = 0 r = f-1 while", "= open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) + \"\\n\" + str(x) + \"\\n\"", "plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0] if plain1 == plain2: print(\"Dosyalar Özdeştir..\")", "plain2 = plain2File.read().split(\"\\n\")[0] if plain1 == plain2: print(\"Dosyalar Özdeştir..\") else:", "N = int(phi), int(x), int(N) privateKeyFile.close() cipherTextFile = open(ciphertext, \"r\")", "random.randrange(2, p-2) if Control(a): return 0 return 1 def Keygen(n):", "pow(u, r, N) % N cipherTextFile = open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText))", "return 0 p = f-1 u = 0 r =", "N = p * q phi = (p - 1)", "r >>= 1 u+=1 def Control(a): z = pow(a, r,", "phi = (p - 1) * (q - 1) while", "r = f-1 while (r%2 == 0): r >>= 1", "math.gcd(r, int((p - 1) / r)) == 1: break while", "(2**i) * r, f-1) if z == p: return 0", "modInvert(r, N) % N, N) if x != 1: break", "= pow(y, plainCopy, N) * pow(u, r, N) % N", "return(c, e - (b // a) * d, d) def", "for i in range(s): a = random.randrange(2, p-2) if Control(a):", "i in range(s): a = random.randrange(2, p-2) if Control(a): return", "1)) == 1: break N = p * q phi", "c != 1: raise Exception('moduler ters bulunamadi') else: return d", "plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True: u = random.randrange(1, int(N))", "0 r = f-1 while (r%2 == 0): r >>=", "1) / r)) == 1: break while True: q =", "open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) + \"\\n\" + str(y)) publicKeyFile.close() 
privateKeyFile =", "çiftleri oluşturulmadan deşifreleme işlemi yapılamz. Lütfen önce Keygen fonksiyonunu çalıştırın.\")", "i, N) == a): break plainText2File = open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i))", "\"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan şifrelme işlemi yapılamaz. Lütfen", "= privateKeyFile.read().split(\"\\n\") phi, x, N = int(phi), int(x), int(N) privateKeyFile.close()", "* d, d) def modInvert(a,b): c,d,e = egcd(a,b) if c", "str(N)) privateKeyFile.close() def encrypt(plaintext, publickeytxt): try: open(publickeytxt, \"r\") except FileNotFoundError:", "u = random.randrange(1, int(N)) if math.gcd(y, N) == 1: break", "raise Exception('moduler ters bulunamadi') else: return d % b def", "modInvert(a,b): c,d,e = egcd(a,b) if c != 1: raise Exception('moduler", "% N, N) for i in range(r -1): if(pow(x, i,", "privateKeyFile = open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) + \"\\n\" + str(x) +", "N, N) for i in range(r -1): if(pow(x, i, N)", "privateKeyFile.close() cipherTextFile = open(ciphertext, \"r\") cipherCopy = int(cipherTextFile.read()) a =", "publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) + \"\\n\" + str(x)", "x = pow(y, phi * modInvert(r, N) % N, N)", "Control(a): z = pow(a, r, f) if z == 1:", "- 1)) == 1: break N = p * q", "plain1 == plain2: print(\"Dosyalar Özdeştir..\") else: print(\"Dosyalar özdeş değildir..\") n", "and RabinMiller(p) and math.gcd(r, int((p - 1) / r)) ==", "plainCopy, N) * pow(u, r, N) % N cipherTextFile =", "= open(ciphertext, \"r\") cipherCopy = int(cipherTextFile.read()) a = pow(cipherCopy, (phi", "= open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt,", "!= 1: raise Exception('moduler ters bulunamadi') else: return d %", "- 1) % r == 0 and RabinMiller(p) and math.gcd(r,", "\"r\") phi, x, N = 
privateKeyFile.read().split(\"\\n\") phi, x, N =", "= open(\"plaintext.txt\", \"r\") plain1 = plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0] if", "= pow(y, phi * modInvert(r, N) % N, N) if", "N) % N, N) if x != 1: break publicKeyFile", "= int(y) publicKeyFile.close() plainTextFile = open(plaintext, \"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0])", "open(publickeytxt, \"r\") N, y = publicKeyFile.read().split(\"\\n\") N = int(N) y", "egcd(b % a, a) return(c, e - (b // a)", "publickeytxt): try: open(publickeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan şifrelme", "pow(cipherCopy, (phi * modInvert(r, N)) % N, N) for i", "FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan deşifreleme işlemi yapılamz. Lütfen önce Keygen", "\"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt, \"r\") except", "d % b def randomInteger(n): return random.randrange(2 ** (n-1), 2", "N) for i in range(r -1): if(pow(x, i, N) ==", "str(y)) publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) + \"\\n\" +", "a, a) return(c, e - (b // a) * d,", "privateKeyFile.write(str(phi) + \"\\n\" + str(x) + \"\\n\" + str(N)) privateKeyFile.close()", "plainText2File.write(str(i)) plainText2File.close() plain2File = open(\"plaintext2.txt\", \"r\") plain1File = open(\"plaintext.txt\", \"r\")", "import math import random r = 3271 def egcd(a,b): if(a", "else: return d % b def randomInteger(n): return random.randrange(2 **", "\"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan deşifreleme işlemi yapılamz. 
Lütfen", "fonksiyonunu çalıştırın.\") else: publicKeyFile = open(publickeytxt, \"r\") N, y =", "RabinMiller(q) and math.gcd(r, int(q - 1)) == 1: break N", "def decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri", "== 2): return 1 if not (f & 1): return", "1 def RabinMiller(f): s = 5 if(f == 2): return", "çalıştırın.\") else: publicKeyFile = open(publickeytxt, \"r\") N, y = publicKeyFile.read().split(\"\\n\")", "not (f & 1): return 0 p = f-1 u", "open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) + \"\\n\" + str(x) + \"\\n\" +", "0 for i in range(u): z = pow(a, (2**i) *", "N) == 1: x = pow(y, phi * modInvert(r, N)", "True: q = randomInteger(n//2) if RabinMiller(q) and math.gcd(r, int(q -", "return d % b def randomInteger(n): return random.randrange(2 ** (n-1),", "pow(a, (2**i) * r, f-1) if z == p: return", "cipherText = pow(y, plainCopy, N) * pow(u, r, N) %", "Exception('moduler ters bulunamadi') else: return d % b def randomInteger(n):", "def Control(a): z = pow(a, r, f) if z ==", "int(q - 1)) == 1: break N = p *", "r, f) if z == 1: return 0 for i", "random.randrange(2 ** (n-1), 2 ** n) | 1 def RabinMiller(f):", "ters bulunamadi') else: return d % b def randomInteger(n): return", "== plain2: print(\"Dosyalar Özdeştir..\") else: print(\"Dosyalar özdeş değildir..\") n =", "- 1) / r)) == 1: break while True: q", "u = 0 r = f-1 while (r%2 == 0):", "N) % N cipherTextFile = open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def", "2): return 1 if not (f & 1): return 0", "try: open(publickeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan şifrelme işlemi", "3271 def egcd(a,b): if(a == 0): return(b,0,1) else: c,d,e =", "\"w+\") privateKeyFile.write(str(phi) + \"\\n\" + str(x) + \"\\n\" + str(N))", "int(N) privateKeyFile.close() cipherTextFile = open(ciphertext, \"r\") cipherCopy = int(cipherTextFile.read()) a", "return 
random.randrange(2 ** (n-1), 2 ** n) | 1 def", "z = pow(a, (2**i) * r, f-1) if z ==", "return 0 for i in range(u): z = pow(a, (2**i)", "if Control(a): return 0 return 1 def Keygen(n): while True:", "random.randrange(1, int(N)) if math.gcd(y, N) == 1: break cipherText =", "f) if z == 1: return 0 for i in", "1: break N = p * q phi = (p", "and math.gcd(r, int(q - 1)) == 1: break N =", "/ r)) == 1: break while True: q = randomInteger(n//2)", "x, N = int(phi), int(x), int(N) privateKeyFile.close() cipherTextFile = open(ciphertext,", "== 0): r >>= 1 u+=1 def Control(a): z =", "egcd(a,b): if(a == 0): return(b,0,1) else: c,d,e = egcd(b %", "r == 0 and RabinMiller(p) and math.gcd(r, int((p - 1)", "** n) | 1 def RabinMiller(f): s = 5 if(f", "şifrelme işlemi yapılamaz. Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: publicKeyFile", "r = 3271 def egcd(a,b): if(a == 0): return(b,0,1) else:", "1 u+=1 def Control(a): z = pow(a, r, f) if", "r, N) % N cipherTextFile = open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close()", "deşifreleme işlemi yapılamz. 
Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: privateKeyFile", "q = randomInteger(n//2) if RabinMiller(q) and math.gcd(r, int(q - 1))", "0 p = f-1 u = 0 r = f-1", "plain2File.read().split(\"\\n\")[0] if plain1 == plain2: print(\"Dosyalar Özdeştir..\") else: print(\"Dosyalar özdeş", "0): return(b,0,1) else: c,d,e = egcd(b % a, a) return(c,", "c,d,e = egcd(a,b) if c != 1: raise Exception('moduler ters", "def RabinMiller(f): s = 5 if(f == 2): return 1", "p: return 0 return 1 for i in range(s): a", "// a) * d, d) def modInvert(a,b): c,d,e = egcd(a,b)", "= egcd(a,b) if c != 1: raise Exception('moduler ters bulunamadi')", "- 1) while True: y = random.randrange(1, N) if math.gcd(y,", "= egcd(b % a, a) return(c, e - (b //", "\"r\") cipherCopy = int(cipherTextFile.read()) a = pow(cipherCopy, (phi * modInvert(r,", "break cipherText = pow(y, plainCopy, N) * pow(u, r, N)", "cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt, \"r\") except FileNotFoundError:", "% N cipherTextFile = open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext,", "int(x), int(N) privateKeyFile.close() cipherTextFile = open(ciphertext, \"r\") cipherCopy = int(cipherTextFile.read())", "değildir..\") n = int(input(\"Oluşturulmak istenen anahtar çiftlerinin bit uzunluğunu girin:", "if z == 1: return 0 for i in range(u):", "(b // a) * d, d) def modInvert(a,b): c,d,e =", "x != 1: break publicKeyFile = open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) +", "Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: publicKeyFile = open(publickeytxt, \"r\")", "5 if(f == 2): return 1 if not (f &", "publicKeyFile = open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) + \"\\n\" + str(y)) publicKeyFile.close()", "Keygen(n): while True: p = randomInteger(n//2) if (p - 1)", "plain1 = plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0] if 
plain1 == plain2:", "fonksiyonunu çalıştırın.\") else: privateKeyFile = open(privatekeytxt, \"r\") phi, x, N", "N) == 1: break cipherText = pow(y, plainCopy, N) *", "a) * d, d) def modInvert(a,b): c,d,e = egcd(a,b) if", "if (p - 1) % r == 0 and RabinMiller(p)", "True: p = randomInteger(n//2) if (p - 1) % r", "pow(a, r, f) if z == 1: return 0 for", "1) * (q - 1) while True: y = random.randrange(1,", "= open(\"plaintext2.txt\", \"r\") plain1File = open(\"plaintext.txt\", \"r\") plain1 = plain1File.read().split(\"\\n\")[0]", "if(a == 0): return(b,0,1) else: c,d,e = egcd(b % a,", "= randomInteger(n//2) if (p - 1) % r == 0", "p * q phi = (p - 1) * (q", "N = privateKeyFile.read().split(\"\\n\") phi, x, N = int(phi), int(x), int(N)", "egcd(a,b) if c != 1: raise Exception('moduler ters bulunamadi') else:", "open(privatekeytxt, \"r\") phi, x, N = privateKeyFile.read().split(\"\\n\") phi, x, N", "q phi = (p - 1) * (q - 1)", "plainTextFile.close() while True: u = random.randrange(1, int(N)) if math.gcd(y, N)", "else: print(\"Dosyalar özdeş değildir..\") n = int(input(\"Oluşturulmak istenen anahtar çiftlerinin", "a = pow(cipherCopy, (phi * modInvert(r, N)) % N, N)", "int((p - 1) / r)) == 1: break while True:", "yapılamz. Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: privateKeyFile = open(privatekeytxt,", "% r == 0 and RabinMiller(p) and math.gcd(r, int((p -", "if(f == 2): return 1 if not (f & 1):", "x, N = privateKeyFile.read().split(\"\\n\") phi, x, N = int(phi), int(x),", "* q phi = (p - 1) * (q -", "çiftleri oluşturulmadan şifrelme işlemi yapılamaz. 
Lütfen önce Keygen fonksiyonunu çalıştırın.\")", "2 ** n) | 1 def RabinMiller(f): s = 5", "+ \"\\n\" + str(x) + \"\\n\" + str(N)) privateKeyFile.close() def", "+ \"\\n\" + str(N)) privateKeyFile.close() def encrypt(plaintext, publickeytxt): try: open(publickeytxt,", "* (q - 1) while True: y = random.randrange(1, N)", "Keygen fonksiyonunu çalıştırın.\") else: publicKeyFile = open(publickeytxt, \"r\") N, y", "a): break plainText2File = open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) plainText2File.close() plain2File =", "cipherTextFile = open(ciphertext, \"r\") cipherCopy = int(cipherTextFile.read()) a = pow(cipherCopy,", "Özdeştir..\") else: print(\"Dosyalar özdeş değildir..\") n = int(input(\"Oluşturulmak istenen anahtar", "* modInvert(r, N)) % N, N) for i in range(r", "y = int(y) publicKeyFile.close() plainTextFile = open(plaintext, \"r\") plainCopy =", "r, f-1) if z == p: return 0 return 1", "def Keygen(n): while True: p = randomInteger(n//2) if (p -", "n) | 1 def RabinMiller(f): s = 5 if(f ==", "for i in range(r -1): if(pow(x, i, N) == a):", "& 1): return 0 p = f-1 u = 0", "= random.randrange(2, p-2) if Control(a): return 0 return 1 def", "(q - 1) while True: y = random.randrange(1, N) if", "return 1 def Keygen(n): while True: p = randomInteger(n//2) if", "== p: return 0 return 1 for i in range(s):", "plainText2File.close() plain2File = open(\"plaintext2.txt\", \"r\") plain1File = open(\"plaintext.txt\", \"r\") plain1", "try: open(privatekeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan deşifreleme işlemi", "0): r >>= 1 u+=1 def Control(a): z = pow(a,", "(p - 1) * (q - 1) while True: y", "return 0 return 1 def Keygen(n): while True: p =", "def encrypt(plaintext, publickeytxt): try: open(publickeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri", "plainText2File = open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) plainText2File.close() plain2File = open(\"plaintext2.txt\", \"r\")", "b def 
randomInteger(n): return random.randrange(2 ** (n-1), 2 ** n)", "modInvert(r, N)) % N, N) for i in range(r -1):", "1: return 0 for i in range(u): z = pow(a,", "z == p: return 0 return 1 for i in", "-1): if(pow(x, i, N) == a): break plainText2File = open(\"plaintext2.txt\",", "= open(publickeytxt, \"r\") N, y = publicKeyFile.read().split(\"\\n\") N = int(N)", "if(pow(x, i, N) == a): break plainText2File = open(\"plaintext2.txt\", \"w+\")", "- (b // a) * d, d) def modInvert(a,b): c,d,e", "if math.gcd(y, N) == 1: x = pow(y, phi *", "1: break cipherText = pow(y, plainCopy, N) * pow(u, r,", "u+=1 def Control(a): z = pow(a, r, f) if z", "\"r\") plain1File = open(\"plaintext.txt\", \"r\") plain1 = plain1File.read().split(\"\\n\")[0] plain2 =", "while (r%2 == 0): r >>= 1 u+=1 def Control(a):", "= (p - 1) * (q - 1) while True:", "math.gcd(y, N) == 1: break cipherText = pow(y, plainCopy, N)", "N cipherTextFile = open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext, privatekeytxt):", "= 5 if(f == 2): return 1 if not (f", "1 def Keygen(n): while True: p = randomInteger(n//2) if (p", "d, d) def modInvert(a,b): c,d,e = egcd(a,b) if c !=", "<NAME> 170401038 import math import random r = 3271 def", "N) == a): break plainText2File = open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) plainText2File.close()", "except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan deşifreleme işlemi yapılamz. 
Lütfen önce", "= open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) + \"\\n\" + str(y)) publicKeyFile.close() privateKeyFile", "decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan", "\"w+\") plainText2File.write(str(i)) plainText2File.close() plain2File = open(\"plaintext2.txt\", \"r\") plain1File = open(\"plaintext.txt\",", "= int(phi), int(x), int(N) privateKeyFile.close() cipherTextFile = open(ciphertext, \"r\") cipherCopy", "= pow(cipherCopy, (phi * modInvert(r, N)) % N, N) for", "170401038 import math import random r = 3271 def egcd(a,b):", "== a): break plainText2File = open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) plainText2File.close() plain2File", "True: y = random.randrange(1, N) if math.gcd(y, N) == 1:", ">>= 1 u+=1 def Control(a): z = pow(a, r, f)", "if math.gcd(y, N) == 1: break cipherText = pow(y, plainCopy,", "privateKeyFile.read().split(\"\\n\") phi, x, N = int(phi), int(x), int(N) privateKeyFile.close() cipherTextFile", "bulunamadi') else: return d % b def randomInteger(n): return random.randrange(2", "import random r = 3271 def egcd(a,b): if(a == 0):", "i in range(u): z = pow(a, (2**i) * r, f-1)", "1): return 0 p = f-1 u = 0 r", "and math.gcd(r, int((p - 1) / r)) == 1: break", "(f & 1): return 0 p = f-1 u =", "open(\"plaintext2.txt\", \"r\") plain1File = open(\"plaintext.txt\", \"r\") plain1 = plain1File.read().split(\"\\n\")[0] plain2", "1 if not (f & 1): return 0 p =", "publicKeyFile.read().split(\"\\n\") N = int(N) y = int(y) publicKeyFile.close() plainTextFile =", "= publicKeyFile.read().split(\"\\n\") N = int(N) y = int(y) publicKeyFile.close() plainTextFile", "Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: privateKeyFile = open(privatekeytxt, \"r\")", "1: x = pow(y, phi * modInvert(r, N) % N,", "0 and RabinMiller(p) and math.gcd(r, int((p - 1) / r))", "open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) 
plainText2File.close() plain2File = open(\"plaintext2.txt\", \"r\") plain1File =", "= 0 r = f-1 while (r%2 == 0): r", "else: publicKeyFile = open(publickeytxt, \"r\") N, y = publicKeyFile.read().split(\"\\n\") N", "in range(s): a = random.randrange(2, p-2) if Control(a): return 0", "N) if x != 1: break publicKeyFile = open(\"publickey.txt\", \"w+\")", "== 0): return(b,0,1) else: c,d,e = egcd(b % a, a)", "publicKeyFile.close() plainTextFile = open(plaintext, \"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while", "open(plaintext, \"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True: u =", "break publicKeyFile = open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) + \"\\n\" + str(y))", "int(N)) if math.gcd(y, N) == 1: break cipherText = pow(y,", "plain1File = open(\"plaintext.txt\", \"r\") plain1 = plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0]", "çalıştırın.\") else: privateKeyFile = open(privatekeytxt, \"r\") phi, x, N =", "privateKeyFile = open(privatekeytxt, \"r\") phi, x, N = privateKeyFile.read().split(\"\\n\") phi,", "cipherTextFile = open(\"ciphertext.txt\", \"w+\") cipherTextFile.write(str(cipherText)) cipherTextFile.close() def decrypt(ciphertext, privatekeytxt): try:", "yapılamaz. 
Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: publicKeyFile = open(publickeytxt,", "c,d,e = egcd(b % a, a) return(c, e - (b", "\"r\") plain1 = plain1File.read().split(\"\\n\")[0] plain2 = plain2File.read().split(\"\\n\")[0] if plain1 ==", "return(b,0,1) else: c,d,e = egcd(b % a, a) return(c, e", "1 for i in range(s): a = random.randrange(2, p-2) if", "\"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True: u = random.randrange(1,", "1: raise Exception('moduler ters bulunamadi') else: return d % b", "p-2) if Control(a): return 0 return 1 def Keygen(n): while", "= randomInteger(n//2) if RabinMiller(q) and math.gcd(r, int(q - 1)) ==", "if x != 1: break publicKeyFile = open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N)", "= int(N) y = int(y) publicKeyFile.close() plainTextFile = open(plaintext, \"r\")", "Control(a): return 0 return 1 def Keygen(n): while True: p", "publicKeyFile = open(publickeytxt, \"r\") N, y = publicKeyFile.read().split(\"\\n\") N =", "return 0 return 1 for i in range(s): a =", "== 1: x = pow(y, phi * modInvert(r, N) %", "y = random.randrange(1, N) if math.gcd(y, N) == 1: x", "istenen anahtar çiftlerinin bit uzunluğunu girin: \")) Keygen(n) encrypt(\"plaintext.txt\",\"publickey.txt\") decrypt(\"ciphertext.txt\",", "f-1) if z == p: return 0 return 1 for", "math.gcd(y, N) == 1: x = pow(y, phi * modInvert(r,", "else: c,d,e = egcd(b % a, a) return(c, e -", "f-1 while (r%2 == 0): r >>= 1 u+=1 def", "print(\"Anahtar çiftleri oluşturulmadan deşifreleme işlemi yapılamz. Lütfen önce Keygen fonksiyonunu", "\"r\") N, y = publicKeyFile.read().split(\"\\n\") N = int(N) y =", "oluşturulmadan şifrelme işlemi yapılamaz. 
Lütfen önce Keygen fonksiyonunu çalıştırın.\") else:", "- 1) * (q - 1) while True: y =", "1) % r == 0 and RabinMiller(p) and math.gcd(r, int((p", "RabinMiller(p) and math.gcd(r, int((p - 1) / r)) == 1:", "int(phi), int(x), int(N) privateKeyFile.close() cipherTextFile = open(ciphertext, \"r\") cipherCopy =", "n = int(input(\"Oluşturulmak istenen anahtar çiftlerinin bit uzunluğunu girin: \"))", "= int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True: u = random.randrange(1, int(N)) if", "FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan şifrelme işlemi yapılamaz. Lütfen önce Keygen", "1: break while True: q = randomInteger(n//2) if RabinMiller(q) and", "cipherCopy = int(cipherTextFile.read()) a = pow(cipherCopy, (phi * modInvert(r, N))", "break plainText2File = open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) plainText2File.close() plain2File = open(\"plaintext2.txt\",", "if plain1 == plain2: print(\"Dosyalar Özdeştir..\") else: print(\"Dosyalar özdeş değildir..\")", "int(y) publicKeyFile.close() plainTextFile = open(plaintext, \"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close()", "for i in range(u): z = pow(a, (2**i) * r,", "phi, x, N = privateKeyFile.read().split(\"\\n\") phi, x, N = int(phi),", "in range(r -1): if(pow(x, i, N) == a): break plainText2File", "# <NAME> 170401038 import math import random r = 3271", "int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True: u = random.randrange(1, int(N)) if math.gcd(y,", "\"\\n\" + str(x) + \"\\n\" + str(N)) privateKeyFile.close() def encrypt(plaintext,", "1) while True: y = random.randrange(1, N) if math.gcd(y, N)", "= random.randrange(1, N) if math.gcd(y, N) == 1: x =", "def modInvert(a,b): c,d,e = egcd(a,b) if c != 1: raise", "N, N) if x != 1: break publicKeyFile = open(\"publickey.txt\",", "= p * q phi = (p - 1) *", "= plain2File.read().split(\"\\n\")[0] if plain1 == plain2: print(\"Dosyalar Özdeştir..\") else: 
print(\"Dosyalar", "oluşturulmadan deşifreleme işlemi yapılamz. Lütfen önce Keygen fonksiyonunu çalıştırın.\") else:", "+ \"\\n\" + str(y)) publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi)", "= pow(a, (2**i) * r, f-1) if z == p:", "range(u): z = pow(a, (2**i) * r, f-1) if z", "(phi * modInvert(r, N)) % N, N) for i in", "random r = 3271 def egcd(a,b): if(a == 0): return(b,0,1)", "range(s): a = random.randrange(2, p-2) if Control(a): return 0 return", "while True: q = randomInteger(n//2) if RabinMiller(q) and math.gcd(r, int(q", "= f-1 u = 0 r = f-1 while (r%2", "r)) == 1: break while True: q = randomInteger(n//2) if", "s = 5 if(f == 2): return 1 if not", "== 1: break N = p * q phi =", "p = randomInteger(n//2) if (p - 1) % r ==", "N) if math.gcd(y, N) == 1: x = pow(y, phi", "önce Keygen fonksiyonunu çalıştırın.\") else: privateKeyFile = open(privatekeytxt, \"r\") phi,", "phi * modInvert(r, N) % N, N) if x !=", "= random.randrange(1, int(N)) if math.gcd(y, N) == 1: break cipherText", "** (n-1), 2 ** n) | 1 def RabinMiller(f): s", "open(publickeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan şifrelme işlemi yapılamaz.", "% b def randomInteger(n): return random.randrange(2 ** (n-1), 2 **", "= f-1 while (r%2 == 0): r >>= 1 u+=1", "= open(\"plaintext2.txt\", \"w+\") plainText2File.write(str(i)) plainText2File.close() plain2File = open(\"plaintext2.txt\", \"r\") plain1File", "while True: p = randomInteger(n//2) if (p - 1) %", "özdeş değildir..\") n = int(input(\"Oluşturulmak istenen anahtar çiftlerinin bit uzunluğunu", "= int(input(\"Oluşturulmak istenen anahtar çiftlerinin bit uzunluğunu girin: \")) Keygen(n)", "N, y = publicKeyFile.read().split(\"\\n\") N = int(N) y = int(y)", "break N = p * q phi = (p -", "+ str(N)) privateKeyFile.close() def encrypt(plaintext, publickeytxt): try: open(publickeytxt, \"r\") except", "break while True: q = randomInteger(n//2) if RabinMiller(q) and 
math.gcd(r,", "(n-1), 2 ** n) | 1 def RabinMiller(f): s =", "f-1 u = 0 r = f-1 while (r%2 ==", "randomInteger(n//2) if (p - 1) % r == 0 and", "önce Keygen fonksiyonunu çalıştırın.\") else: publicKeyFile = open(publickeytxt, \"r\") N,", "a) return(c, e - (b // a) * d, d)", "== 1: return 0 for i in range(u): z =", "| 1 def RabinMiller(f): s = 5 if(f == 2):", "z == 1: return 0 for i in range(u): z", "N) * pow(u, r, N) % N cipherTextFile = open(\"ciphertext.txt\",", "z = pow(a, r, f) if z == 1: return", "N)) % N, N) for i in range(r -1): if(pow(x,", "int(input(\"Oluşturulmak istenen anahtar çiftlerinin bit uzunluğunu girin: \")) Keygen(n) encrypt(\"plaintext.txt\",\"publickey.txt\")", "anahtar çiftlerinin bit uzunluğunu girin: \")) Keygen(n) encrypt(\"plaintext.txt\",\"publickey.txt\") decrypt(\"ciphertext.txt\", \"privatekey.txt\")", "range(r -1): if(pow(x, i, N) == a): break plainText2File =", "\"w+\") publicKeyFile.write(str(N) + \"\\n\" + str(y)) publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\",", "* modInvert(r, N) % N, N) if x != 1:", "open(ciphertext, \"r\") cipherCopy = int(cipherTextFile.read()) a = pow(cipherCopy, (phi *", "publicKeyFile.write(str(N) + \"\\n\" + str(y)) publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\", \"w+\")", "if RabinMiller(q) and math.gcd(r, int(q - 1)) == 1: break", "== 1: break while True: q = randomInteger(n//2) if RabinMiller(q)", "print(\"Dosyalar Özdeştir..\") else: print(\"Dosyalar özdeş değildir..\") n = int(input(\"Oluşturulmak istenen", "a = random.randrange(2, p-2) if Control(a): return 0 return 1", "y = publicKeyFile.read().split(\"\\n\") N = int(N) y = int(y) publicKeyFile.close()", "= open(privatekeytxt, \"r\") phi, x, N = privateKeyFile.read().split(\"\\n\") phi, x,", "plainTextFile = open(plaintext, \"r\") plainCopy = int(plainTextFile.read().split(\"\\n\")[0]) plainTextFile.close() while True:", "\"\\n\" + str(N)) privateKeyFile.close() def encrypt(plaintext, publickeytxt): try: 
open(publickeytxt, \"r\")", "pow(y, phi * modInvert(r, N) % N, N) if x", "phi, x, N = int(phi), int(x), int(N) privateKeyFile.close() cipherTextFile =", "pow(y, plainCopy, N) * pow(u, r, N) % N cipherTextFile", "(p - 1) % r == 0 and RabinMiller(p) and", "encrypt(plaintext, publickeytxt): try: open(publickeytxt, \"r\") except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan", "print(\"Dosyalar özdeş değildir..\") n = int(input(\"Oluşturulmak istenen anahtar çiftlerinin bit", "(r%2 == 0): r >>= 1 u+=1 def Control(a): z", "math.gcd(r, int(q - 1)) == 1: break N = p", "N = int(N) y = int(y) publicKeyFile.close() plainTextFile = open(plaintext,", "* pow(u, r, N) % N cipherTextFile = open(\"ciphertext.txt\", \"w+\")", "= pow(a, r, f) if z == 1: return 0", "if z == p: return 0 return 1 for i", "== 1: break cipherText = pow(y, plainCopy, N) * pow(u,", "işlemi yapılamaz. Lütfen önce Keygen fonksiyonunu çalıştırın.\") else: publicKeyFile =", "in range(u): z = pow(a, (2**i) * r, f-1) if", "except FileNotFoundError: print(\"Anahtar çiftleri oluşturulmadan şifrelme işlemi yapılamaz. 
Lütfen önce", "cipherTextFile.close() def decrypt(ciphertext, privatekeytxt): try: open(privatekeytxt, \"r\") except FileNotFoundError: print(\"Anahtar", "\"\\n\" + str(y)) publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) +", "d) def modInvert(a,b): c,d,e = egcd(a,b) if c != 1:", "plain2File = open(\"plaintext2.txt\", \"r\") plain1File = open(\"plaintext.txt\", \"r\") plain1 =", "% N, N) if x != 1: break publicKeyFile =", "0 return 1 def Keygen(n): while True: p = randomInteger(n//2)", "!= 1: break publicKeyFile = open(\"publickey.txt\", \"w+\") publicKeyFile.write(str(N) + \"\\n\"", "+ str(y)) publicKeyFile.close() privateKeyFile = open(\"privatekey.txt\", \"w+\") privateKeyFile.write(str(phi) + \"\\n\"", "randomInteger(n): return random.randrange(2 ** (n-1), 2 ** n) | 1", "privateKeyFile.close() def encrypt(plaintext, publickeytxt): try: open(publickeytxt, \"r\") except FileNotFoundError: print(\"Anahtar", "int(cipherTextFile.read()) a = pow(cipherCopy, (phi * modInvert(r, N)) % N,", "= int(cipherTextFile.read()) a = pow(cipherCopy, (phi * modInvert(r, N)) %", "i in range(r -1): if(pow(x, i, N) == a): break", "print(\"Anahtar çiftleri oluşturulmadan şifrelme işlemi yapılamaz. Lütfen önce Keygen fonksiyonunu", "RabinMiller(f): s = 5 if(f == 2): return 1 if", "if c != 1: raise Exception('moduler ters bulunamadi') else: return", "int(N) y = int(y) publicKeyFile.close() plainTextFile = open(plaintext, \"r\") plainCopy", "if not (f & 1): return 0 p = f-1" ]
[ "as predecessors print networkx.descendants(self.networkx, e) # almost as child_to_parents def", "# write_to_dot def get_subgraph(self, envos=None): \"\"\"Given a list of ENVO", "* https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To install: $ pip install Orange-Bioinformatics", "= node.attr['name'] node.attr['label'] = text.replace(' ','\\\\n') node.attr['name'] = '' node.attr['shape']", "% (envo, text) for edge in g.edges(): if edge.attr['label'] ==", "matplotlib import pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self, g, path):", "format. Seems like it looses directionality. * https://networkx.readthedocs.org/en/stable/ To install:", "= path # --------------------------- In this section --------------------------- # #", "add_weights(self, g, weights=None): \"\"\"Input a networkx DiGraph object. Outputs a", "assert g.is_strict() return g @property_cached def networkx(self): \"\"\"The network converted", "envos to help test this module # test_envos = [", "\"\"\" def __init__(self, path=None): \"\"\"Give the path to the OBO", "key2:i1:w [color=red]; key:i2:e -> key2:i2:w [color=blue]; key:i3:e -> key2:i3:w [color=turquoise4];", "the different libraries work.\"\"\" # Test node # if e", "path=None): \"\"\"Give the path to the OBO file\"\"\" if path", "to `orange network` format. 
Doesn't seem to work until they", "handle.write(g.to_string()) def add_legend(self, path): \"\"\"Input the path to a dot", "\"\"\" return self.orange_obo.to_network() @property_cached def pygraphviz(self): \"\"\"The network converted to", "= list(nodes) # Return # return self.networkx.subgraph(nodes) def add_weights(self, g,", "\"\"\"Does the envo term `e` descend from the node `root`?", "dot file.\"\"\" sh.dot(in_path, '-Tpdf', '-o', out_path) # --------------------------- In this", "# print_test # draw_with_networkx # draw_with_pygraphviz def print_test(self, e=None): \"\"\"Just", "handle: handle.write(g.to_string()) def add_legend(self, path): \"\"\"Input the path to a", "return g for envo in weights: node = g.get_node(envo) weight", "test # get_subgraph # add_weights # draw_to_pdf # write_to_dot def", "\"\"\"The network converted to `pygraphviz` format. * http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install:", "\"ENVO:00000475\", ] ################################################################################ class Ontology(object): \"\"\"A object that gives you", "help test this module # test_envos = [ \"ENVO:00000033\", \"ENVO:00000043\",", "return self.orange_obo.to_network() @property_cached def pygraphviz(self): \"\"\"The network converted to `pygraphviz`", "node.attr['name'] node.attr['label'] = text.replace(' ','\\\\n') node.attr['name'] = '' node.attr['shape'] =", "border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr>", "path): \"\"\"Input a networkx DiGraph object.\"\"\" from matplotlib import pyplot", "$ pip install networkx \"\"\" g = self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g)", "--------------------------- In this section --------------------------- # # test # get_subgraph", "module_dir from seqenv.common.cache import property_cached # Third party modules #", "import module_dir from 
seqenv.common.cache import property_cached # Third party modules", "= test_envos[0] # Goa # print \"Goa: \" print self.goatools[e]", "obo_parser.GODag(self.path) @property_cached def orange_network(self): \"\"\"The network converted to `orange network`", "key:i2:e -> key2:i2:w [color=blue]; key:i3:e -> key2:i3:w [color=turquoise4]; }\"\"\" orig_txt", "'-Tpdf', '-o', out_path) # --------------------------- In this section --------------------------- #", "print self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e) # same as predecessors print", "orange_network # pygraphviz # networkx @property_cached def orange_obo(self): \"\"\"The ontology", "$ pip install pygraphviz \"\"\" g = self.orange_obo.to_graphviz() assert g.is_directed()", "if line] new_text = '\\n'.join(new_text + orig_txt[2:]) with open(path, 'w')", "isinstance(e, int): e = \"ENVO:%08d\" % e if isinstance(root, int):", "\"networkx: \" print self.networkx[e] print self.networkx.successors(e) print self.networkx.predecessors(e) print networkx.ancestors(self.networkx,", "[line.lstrip() for line in legend_txt.split('\\n') if line] new_text = '\\n'.join(new_text", "nodes weigh more\"; key [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td", "self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) # Networkx # import networkx print \"networkx:", "networkx.is_directed_acyclic_graph(g) return g # --------------------------- In this section --------------------------- #", "`orange network` format. 
Doesn't seem to work until they update", "edge in g.edges(): if edge.attr['label'] == 'located_in': edge.attr['color'] = 'turquoise4'", "</table>>]; key2 [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td", "e = \"ENVO:%08d\" % e if isinstance(root, int): root =", "object.\"\"\" from matplotlib import pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self,", "descends def descends(self, e, root): \"\"\"Does the envo term `e`", "if envos is None: envos = test_envos # All nodes", "weights=None): \"\"\"Input a networkx DiGraph object. Outputs a pygraphviz AGraph", "seqenv import module_dir from seqenv.common.cache import property_cached # Third party", "how the different libraries work.\"\"\" # Test node # if", "text) for edge in g.edges(): if edge.attr['label'] == 'located_in': edge.attr['color']", "# if isinstance(e, int): e = \"ENVO:%08d\" % e if", "$ pip install Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology import OBOOntology return", "g): \"\"\"Input a pygraphviz AGraph object. Outputs a pygraphviz AGraph", "edge.attr['label'] = '' return g def write_to_dot(self, g, path): \"\"\"Input", "* https://bitbucket.org/biolab/orange-bioinformatics To install: $ pip install Orange-Bioinformatics \"\"\" from", "DiGraph object. 
Outputs a pygraphviz AGraph object.\"\"\" g = networkx.nx_agraph.to_agraph(g)", "import pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self, g, path): \"\"\"Input", "envos is None: envos = test_envos # All nodes #", "port=\"i3\">in</td></tr> </table>>]; key:i1:e -> key2:i1:w [color=red]; key:i2:e -> key2:i2:w [color=blue];", "networkx.ancestors(self.networkx, e) # same as predecessors print networkx.descendants(self.networkx, e) #", "--------------------------- In this section --------------------------- # # print_test # draw_with_networkx", "= 'turquoise4' edge.attr['label'] = '' return g def write_to_dot(self, g,", "nodes = set(n for e in envos for n in", "# Return # return e in networkx.ancestors(self.networkx, root) # ---------------------------", "https://networkx.readthedocs.org/en/stable/ To install: $ pip install networkx \"\"\" g =", "loaded by the `orange` library. * http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ *", "to each node, uncomment: #envo = node.attr['label'] #node.attr['label'] = \"{<f0>", "`networkx` format. Seems like it looses directionality. * https://networkx.readthedocs.org/en/stable/ To", "file.\"\"\" legend_txt = \"\"\" digraph { rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph", "= self.orange_obo.to_graphviz() assert g.is_directed() assert g.is_strict() return g @property_cached def", "g.is_directed() assert g.is_strict() return g @property_cached def networkx(self): \"\"\"The network", "module # test_envos = [ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\",", "Seems like it looses directionality. 
* https://networkx.readthedocs.org/en/stable/ To install: $", "# # test # get_subgraph # add_weights # draw_to_pdf #", "To install: $ pip install networkx \"\"\" g = self.orange_obo.to_networkx()", "e is None: e = test_envos[0] # Goa # print", "To install: $ pip install pygraphviz \"\"\" g = self.orange_obo.to_graphviz()", "nodes = list(nodes) # Return # return self.networkx.subgraph(nodes) def add_weights(self,", "a networkx DiGraph object.\"\"\" from matplotlib import pyplot networkx.draw(g) pyplot.savefig(path)", "print self.networkx[e] print self.networkx.successors(e) print self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e) #", "\"NB: darker nodes weigh more\"; key [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\"", "with nodes and edges) of the ENVO ontology from the", "orangecontrib.bio.ontology import OBOOntology return OBOOntology(self.path) @property_cached def goatools(self): \"\"\"The network", "@property_cached def networkx(self): \"\"\"The network converted to `networkx` format. Seems", "def networkx(self): \"\"\"The network converted to `networkx` format. Seems like", "networkx DiGraph object. Outputs a pygraphviz AGraph object.\"\"\" g =", "with open(path, 'w') as handle: handle.write(g.to_string()) def add_legend(self, path): \"\"\"Input", "section --------------------------- # # orange_obo # goatools # orange_network #", "border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr>", "format. 
* http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $ pip install pygraphviz \"\"\"", "e in networkx.ancestors(self.networkx, root) # --------------------------- In this section ---------------------------", "\"\"\"Input a networkx DiGraph object.\"\"\" from matplotlib import pyplot networkx.draw(g)", "edges) of the ENVO ontology from the OBO file's path.", "All nodes # nodes = set(n for e in envos", "a dot file.\"\"\" legend_txt = \"\"\" digraph { rankdir=LR node", "https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To install: $ pip install Orange-Bioinformatics \"\"\"", "<tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table border=\"0\"", "\"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################ class Ontology(object): \"\"\"A object", "\"\"\"Input a networkx DiGraph object. Outputs a pygraphviz AGraph object.\"\"\"", "weights: node = g.get_node(envo) weight = weights[envo] color = matplotlib.colors.rgb2hex((1.0,", "int): root = \"ENVO:%08d\" % root # Return # return", "and edges) of the ENVO ontology from the OBO file's", "print networkx.ancestors(self.networkx, e) # same as predecessors print networkx.descendants(self.networkx, e)", "network converted to `orange network` format. 
Doesn't seem to work", "(envo, text) for edge in g.edges(): if edge.attr['label'] == 'located_in':", "print self.pygraphviz[e] print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) # Networkx", "path to a dot file.\"\"\" legend_txt = \"\"\" digraph {", "print self.pygraphviz.get_node(e) # Networkx # import networkx print \"networkx: \"", "n in networkx.descendants(self.networkx, e)) nodes.update(envos) nodes = list(nodes) # Return", "Return # return self.networkx.subgraph(nodes) def add_weights(self, g, weights=None): \"\"\"Input a", "import networkx print \"networkx: \" print self.networkx[e] print self.networkx.successors(e) print", "could be added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot \"\"\"", "__init__(self, path=None): \"\"\"Give the path to the OBO file\"\"\" if", "Testing mode # if envos is None: envos = test_envos", "ontology loaded by the `orange` library. * http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/", "= \"NB: darker nodes weigh more\"; key [label=<<table border=\"0\" cellpadding=\"2\"", "\"\"\"Input a pygraphviz AGraph object.\"\"\" with open(path, 'w') as handle:", "print self.goatools[e] # Pygraphviz # print \"pygraphviz: \" print self.pygraphviz[e]", "work until they update PyPI. 
* https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To", "# Test node # if e is None: e =", "print \"Goa: \" print self.goatools[e] # Pygraphviz # print \"pygraphviz:", "cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e ->", "self.path = path # --------------------------- In this section --------------------------- #", "test_envos = [ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\",", "from orangecontrib.bio.ontology import OBOOntology return OBOOntology(self.path) @property_cached def goatools(self): \"\"\"The", "e if isinstance(root, int): root = \"ENVO:%08d\" % root #", "# import sh, networkx import matplotlib.colors # A list of", "Outputs a pygraphviz AGraph object.\"\"\" for node in g.nodes(): text", "\"\"\"The network loaded into goatools' format. * https://github.com/tanghaibao/goatools To install:", "g, path): \"\"\"Input a networkx DiGraph object.\"\"\" from matplotlib import", "root): \"\"\"Does the envo term `e` descend from the node", "\"\"\"The network converted to `orange network` format. 
Doesn't seem to", "pygraphviz AGraph object.\"\"\" g = networkx.nx_agraph.to_agraph(g) if weights is None:", "gives you access to the graph (network with nodes and", "Outputs a pygraphviz AGraph object.\"\"\" g = networkx.nx_agraph.to_agraph(g) if weights", "in g.edges(): if edge.attr['label'] == 'located_in': edge.attr['color'] = 'turquoise4' edge.attr['label']", "handle: handle.write(new_text) def draw_to_pdf(self, in_path, out_path): \"\"\"Input a path to", "this section --------------------------- # # print_test # draw_with_networkx # draw_with_pygraphviz", "print_test(self, e=None): \"\"\"Just a method to see a bit how", "\"Goa: \" print self.goatools[e] # Pygraphviz # print \"pygraphviz: \"", "\"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################ class Ontology(object): \"\"\"A object that", "In this section --------------------------- # # descends def descends(self, e,", "* https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To install: $ pip install orange-network", "for n in networkx.descendants(self.networkx, e)) nodes.update(envos) nodes = list(nodes) #", "* pydot: https://github.com/erocarrera/pydot \"\"\" def __init__(self, path=None): \"\"\"Give the path", "node.attr['label'] = text.replace(' ','\\\\n') node.attr['name'] = '' node.attr['shape'] = 'Mrecord'", "def write_to_dot(self, g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\" with", "edge.attr['label'] == 'located_in': edge.attr['color'] = 'turquoise4' edge.attr['label'] = '' return", "edge.attr['color'] = 'turquoise4' edge.attr['label'] = '' return g def write_to_dot(self,", "# almost as child_to_parents def draw_with_networkx(self, g, path): \"\"\"Input a", "to a dot file.\"\"\" sh.dot(in_path, '-Tpdf', '-o', out_path) # ---------------------------", "node.attr['label'] #node.attr['label'] = \"{<f0> %s|<f1> %s}\" % (envo, text) for", "AGraph object.\"\"\" for 
node in g.nodes(): text = node.attr['name'] node.attr['label']", "'r') if line] new_text = [line.lstrip() for line in legend_txt.split('\\n')", "port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e -> key2:i1:w [color=red]; key:i2:e ->", "if isinstance(root, int): root = \"ENVO:%08d\" % root # Return", "from the OBO file's path. Other libraries not used here", "format. Doesn't seem to work until they update PyPI. *", "`orange` library. * http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics", "ENVO terms, get the subgraph that contains them all and", "= color return g def add_style(self, g): \"\"\"Input a pygraphviz", "def descends(self, e, root): \"\"\"Does the envo term `e` descend", "# Third party modules # import sh, networkx import matplotlib.colors", "\"\"\"Given a list of ENVO terms, get the subgraph that", "port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr>", "it looses directionality. * https://networkx.readthedocs.org/en/stable/ To install: $ pip install", "pydot: https://github.com/erocarrera/pydot \"\"\" def __init__(self, path=None): \"\"\"Give the path to", "\"\"\"Input a pygraphviz AGraph object. Outputs a pygraphviz AGraph object.\"\"\"", "#node.attr['label'] = \"{<f0> %s|<f1> %s}\" % (envo, text) for edge", "cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td", "return g def write_to_dot(self, g, path): \"\"\"Input a pygraphviz AGraph", "return g @property_cached def networkx(self): \"\"\"The network converted to `networkx`", "their ancestors, up to the root. 
Outputs a networkx DiGraph", "# Testing mode # if envos is None: envos =", "\"{<f0> %s|<f1> %s}\" % (envo, text) for edge in g.edges():", "if line] new_text = [line.lstrip() for line in legend_txt.split('\\n') if", "legend_txt.split('\\n') if line] new_text = '\\n'.join(new_text + orig_txt[2:]) with open(path,", "# test_envos = [ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\",", "= networkx.nx_agraph.to_agraph(g) if weights is None: return g for envo", "ancestors, up to the root. Outputs a networkx DiGraph object.\"\"\"", "--------------------------- In this section --------------------------- # # descends def descends(self,", "from goatools import obo_parser return obo_parser.GODag(self.path) @property_cached def orange_network(self): \"\"\"The", "if e is None: e = test_envos[0] # Goa #", "node `root`? Returns True or False.\"\"\" # Auto conversion #", "test this module # test_envos = [ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\",", "e in envos for n in networkx.descendants(self.networkx, e)) nodes.update(envos) nodes", "node.attr['fillcolor'] = color return g def add_style(self, g): \"\"\"Input a", "envo in weights: node = g.get_node(envo) weight = weights[envo] color", "to a dot file.\"\"\" legend_txt = \"\"\" digraph { rankdir=LR", "{ rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 { label = \"NB:", "def __init__(self, path=None): \"\"\"Give the path to the OBO file\"\"\"", "write_to_dot def get_subgraph(self, envos=None): \"\"\"Given a list of ENVO terms,", "# same as predecessors print networkx.descendants(self.networkx, e) # almost as", "%s}\" % (envo, text) for edge in g.edges(): if edge.attr['label']", "# networkx @property_cached def orange_obo(self): \"\"\"The ontology loaded by the", "https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To install: $ pip install orange-network \"\"\"", "key2:i2:w 
[color=blue]; key:i3:e -> key2:i3:w [color=turquoise4]; }\"\"\" orig_txt = [line.rstrip('\\n')", "<tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e -> key2:i1:w [color=red]; key:i2:e", "################################################################################ class Ontology(object): \"\"\"A object that gives you access to", "a networkx DiGraph object.\"\"\" # Testing mode # if envos", "= 'Mrecord' node.attr['style'] = 'filled' # To add the envo", "libraries not used here that could be added: * graphviz:", "assert networkx.is_directed_acyclic_graph(g) return g # --------------------------- In this section ---------------------------", "install orange-network \"\"\" return self.orange_obo.to_network() @property_cached def pygraphviz(self): \"\"\"The network", "goatools import obo_parser return obo_parser.GODag(self.path) @property_cached def orange_network(self): \"\"\"The network", "= '\\n'.join(new_text + orig_txt[2:]) with open(path, 'w') as handle: handle.write(new_text)", "g @property_cached def networkx(self): \"\"\"The network converted to `networkx` format.", "method to see a bit how the different libraries work.\"\"\"", "matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0)) node.attr['fillcolor'] = color return g", "list of ENVO terms, get the subgraph that contains them", "to the OBO file\"\"\" if path is None: path =", "g.nodes(): text = node.attr['name'] node.attr['label'] = text.replace(' ','\\\\n') node.attr['name'] =", "\"ENVO:%08d\" % e if isinstance(root, int): root = \"ENVO:%08d\" %", "networkx.nx_agraph.to_agraph(g) if weights is None: return g for envo in", "e)) nodes.update(envos) nodes = list(nodes) # Return # return self.networkx.subgraph(nodes)", "`e` descend from the node `root`? Returns True or False.\"\"\"", "the `orange` library. 
* http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio *", "label = \"NB: darker nodes weigh more\"; key [label=<<table border=\"0\"", "] ################################################################################ class Ontology(object): \"\"\"A object that gives you access", "self.networkx.subgraph(nodes) def add_weights(self, g, weights=None): \"\"\"Input a networkx DiGraph object.", "* http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To install: $ pip", "$ pip install goatools \"\"\" from goatools import obo_parser return", "# if e is None: e = test_envos[0] # Goa", "orange_obo # goatools # orange_network # pygraphviz # networkx @property_cached", "envo term `e` descend from the node `root`? Returns True", "to help test this module # test_envos = [ \"ENVO:00000033\",", "\"\"\" from orangecontrib.bio.ontology import OBOOntology return OBOOntology(self.path) @property_cached def goatools(self):", "of envos to help test this module # test_envos =", "for edge in g.edges(): if edge.attr['label'] == 'located_in': edge.attr['color'] =", "in envos for n in networkx.descendants(self.networkx, e)) nodes.update(envos) nodes =", "a pygraphviz AGraph object. Outputs a pygraphviz AGraph object.\"\"\" for", "id to each node, uncomment: #envo = node.attr['label'] #node.attr['label'] =", "you access to the graph (network with nodes and edges)", "weights[envo] color = matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0)) node.attr['fillcolor'] =", "https://github.com/erocarrera/pydot \"\"\" def __init__(self, path=None): \"\"\"Give the path to the", "# descends def descends(self, e, root): \"\"\"Does the envo term", "pip install orange-network \"\"\" return self.orange_obo.to_network() @property_cached def pygraphviz(self): \"\"\"The", "path. 
Other libraries not used here that could be added:", "add_legend(self, path): \"\"\"Input the path to a dot file.\"\"\" legend_txt", "\" print self.goatools[e] # Pygraphviz # print \"pygraphviz: \" print", "self.pygraphviz.get_node(e) # Networkx # import networkx print \"networkx: \" print", "key:i3:e -> key2:i3:w [color=turquoise4]; }\"\"\" orig_txt = [line.rstrip('\\n') for line", "converted to `networkx` format. Seems like it looses directionality. *", "import matplotlib.colors # A list of envos to help test", "= \"ENVO:%08d\" % e if isinstance(root, int): root = \"ENVO:%08d\"", "networkx.descendants(self.networkx, e) # almost as child_to_parents def draw_with_networkx(self, g, path):", "modules # # Internal modules # from seqenv import module_dir", "Other libraries not used here that could be added: *", "not used here that could be added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph", "# Networkx # import networkx print \"networkx: \" print self.networkx[e]", "print \"networkx: \" print self.networkx[e] print self.networkx.successors(e) print self.networkx.predecessors(e) print", "\"\"\"Input the path to a dot file.\"\"\" legend_txt = \"\"\"", "None: e = test_envos[0] # Goa # print \"Goa: \"", "from seqenv import module_dir from seqenv.common.cache import property_cached # Third", "# if envos is None: envos = test_envos # All", "# Goa # print \"Goa: \" print self.goatools[e] # Pygraphviz", "is None: envos = test_envos # All nodes # nodes", "http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To install: $ pip install", "weight, 0.0)) node.attr['fillcolor'] = color return g def add_style(self, g):", "object.\"\"\" with open(path, 'w') as handle: handle.write(g.to_string()) def add_legend(self, path):", "--------------------------- # # test # get_subgraph # add_weights # draw_to_pdf", "'filled' # To add the envo id to each node,", "networkx 
print \"networkx: \" print self.networkx[e] print self.networkx.successors(e) print self.networkx.predecessors(e)", "= [line.lstrip() for line in legend_txt.split('\\n') if line] new_text =", "line in legend_txt.split('\\n') if line] new_text = '\\n'.join(new_text + orig_txt[2:])", "cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr>", "in g.nodes(): text = node.attr['name'] node.attr['label'] = text.replace(' ','\\\\n') node.attr['name']", "# Internal modules # from seqenv import module_dir from seqenv.common.cache", "node, uncomment: #envo = node.attr['label'] #node.attr['label'] = \"{<f0> %s|<f1> %s}\"", "add_weights # draw_to_pdf # write_to_dot def get_subgraph(self, envos=None): \"\"\"Given a", "= '' return g def write_to_dot(self, g, path): \"\"\"Input a", "install: $ pip install Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology import OBOOntology", "g = self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return g # --------------------------- In", "{ label = \"NB: darker nodes weigh more\"; key [label=<<table", "is None: return g for envo in weights: node =", "network` format. 
Doesn't seem to work until they update PyPI.", "node = g.get_node(envo) weight = weights[envo] color = matplotlib.colors.rgb2hex((1.0, 1.0", "= [line.rstrip('\\n') for line in open(path, 'r') if line] new_text", "uncomment: #envo = node.attr['label'] #node.attr['label'] = \"{<f0> %s|<f1> %s}\" %", "= text.replace(' ','\\\\n') node.attr['name'] = '' node.attr['shape'] = 'Mrecord' node.attr['style']", "# # orange_obo # goatools # orange_network # pygraphviz #", "descends(self, e, root): \"\"\"Does the envo term `e` descend from", "get the subgraph that contains them all and all their", "# from seqenv import module_dir from seqenv.common.cache import property_cached #", "pygraphviz \"\"\" g = self.orange_obo.to_graphviz() assert g.is_directed() assert g.is_strict() return", "networkx(self): \"\"\"The network converted to `networkx` format. Seems like it", "g def add_style(self, g): \"\"\"Input a pygraphviz AGraph object. Outputs", "g.is_strict() return g @property_cached def networkx(self): \"\"\"The network converted to", "as child_to_parents def draw_with_networkx(self, g, path): \"\"\"Input a networkx DiGraph", "object.\"\"\" g = networkx.nx_agraph.to_agraph(g) if weights is None: return g", "print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) # Networkx # import", "\"ENVO:%08d\" % root # Return # return e in networkx.ancestors(self.networkx,", "# --------------------------- In this section --------------------------- # # orange_obo #", "envos=None): \"\"\"Given a list of ENVO terms, get the subgraph", "ENVO ontology from the OBO file's path. Other libraries not", "install networkx \"\"\" g = self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return g", "object. Outputs a pygraphviz AGraph object.\"\"\" g = networkx.nx_agraph.to_agraph(g) if", "pygraphviz AGraph object. Outputs a pygraphviz AGraph object.\"\"\" for node", "OBO file's path. 
Other libraries not used here that could", "is None: path = module_dir + 'data_envo/envo.obo' self.path = path", "e=None): \"\"\"Just a method to see a bit how the", "network loaded into goatools' format. * https://github.com/tanghaibao/goatools To install: $", "# add_weights # draw_to_pdf # write_to_dot def get_subgraph(self, envos=None): \"\"\"Given", "@property_cached def orange_network(self): \"\"\"The network converted to `orange network` format.", "library. * http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To", "almost as child_to_parents def draw_with_networkx(self, g, path): \"\"\"Input a networkx", "Third party modules # import sh, networkx import matplotlib.colors #", "# orange_network # pygraphviz # networkx @property_cached def orange_obo(self): \"\"\"The", "import obo_parser return obo_parser.GODag(self.path) @property_cached def orange_network(self): \"\"\"The network converted", "key [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td", "them all and all their ancestors, up to the root.", "def pygraphviz(self): \"\"\"The network converted to `pygraphviz` format. 
* http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/", "\"\"\"Input a path to a dot file.\"\"\" sh.dot(in_path, '-Tpdf', '-o',", "\"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################ class Ontology(object): \"\"\"A object that gives", "a pygraphviz AGraph object.\"\"\" g = networkx.nx_agraph.to_agraph(g) if weights is", "# Return # return self.networkx.subgraph(nodes) def add_weights(self, g, weights=None): \"\"\"Input", "self.networkx[e] print self.networkx.successors(e) print self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e) # same", "that could be added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot", "# return e in networkx.ancestors(self.networkx, root) # --------------------------- In this", "# get_subgraph # add_weights # draw_to_pdf # write_to_dot def get_subgraph(self,", "'w') as handle: handle.write(new_text) def draw_to_pdf(self, in_path, out_path): \"\"\"Input a", "graph (network with nodes and edges) of the ENVO ontology", "self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return g # --------------------------- In this section", "this section --------------------------- # # orange_obo # goatools # orange_network", "# To add the envo id to each node, uncomment:", "line] new_text = '\\n'.join(new_text + orig_txt[2:]) with open(path, 'w') as", "new_text = '\\n'.join(new_text + orig_txt[2:]) with open(path, 'w') as handle:", "path is None: path = module_dir + 'data_envo/envo.obo' self.path =", "test_envos[0] # Goa # print \"Goa: \" print self.goatools[e] #", "that gives you access to the graph (network with nodes", "networkx.ancestors(self.networkx, root) # --------------------------- In this section --------------------------- # #", "matplotlib.colors # A list of envos to help test this", "# print \"Goa: \" print self.goatools[e] # Pygraphviz # print", "the 
subgraph that contains them all and all their ancestors,", "return OBOOntology(self.path) @property_cached def goatools(self): \"\"\"The network loaded into goatools'", "object that gives you access to the graph (network with", "network converted to `networkx` format. Seems like it looses directionality.", "goatools \"\"\" from goatools import obo_parser return obo_parser.GODag(self.path) @property_cached def", "'w') as handle: handle.write(g.to_string()) def add_legend(self, path): \"\"\"Input the path", "test_envos # All nodes # nodes = set(n for e", "https://github.com/tanghaibao/goatools To install: $ pip install goatools \"\"\" from goatools", "DiGraph object.\"\"\" # Testing mode # if envos is None:", "# nodes = set(n for e in envos for n", "<tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e -> key2:i1:w [color=red]; key:i2:e -> key2:i2:w", "to `networkx` format. Seems like it looses directionality. * https://networkx.readthedocs.org/en/stable/", "line] new_text = [line.lstrip() for line in legend_txt.split('\\n') if line]", "[shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 { label = \"NB: darker nodes weigh", "is None: e = test_envos[0] # Goa # print \"Goa:", "draw_to_pdf(self, in_path, out_path): \"\"\"Input a path to a dot file.\"\"\"", "http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $ pip install pygraphviz \"\"\" g =", "# draw_with_networkx # draw_with_pygraphviz def print_test(self, e=None): \"\"\"Just a method", "g.edges(): if edge.attr['label'] == 'located_in': edge.attr['color'] = 'turquoise4' edge.attr['label'] =", "cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e", "nodes.update(envos) nodes = list(nodes) # Return # return self.networkx.subgraph(nodes) def", "write_to_dot(self, g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\" with open(path,", "# return self.networkx.subgraph(nodes) def 
add_weights(self, g, weights=None): \"\"\"Input a networkx", "from the node `root`? Returns True or False.\"\"\" # Auto", "converted to `pygraphviz` format. * http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $ pip", "Networkx # import networkx print \"networkx: \" print self.networkx[e] print", "node in g.nodes(): text = node.attr['name'] node.attr['label'] = text.replace(' ','\\\\n')", "e) # same as predecessors print networkx.descendants(self.networkx, e) # almost", "<tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>];", "# draw_to_pdf # write_to_dot def get_subgraph(self, envos=None): \"\"\"Given a list", "path): \"\"\"Input a pygraphviz AGraph object.\"\"\" with open(path, 'w') as", "e = test_envos[0] # Goa # print \"Goa: \" print", "module_dir + 'data_envo/envo.obo' self.path = path # --------------------------- In this", "format. * https://github.com/tanghaibao/goatools To install: $ pip install goatools \"\"\"", "\"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################ class Ontology(object):", "seem to work until they update PyPI. 
* https://bitbucket.org/biolab/orange-network/ *", "None: return g for envo in weights: node = g.get_node(envo)", "Test node # if e is None: e = test_envos[0]", "import sh, networkx import matplotlib.colors # A list of envos", "port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e -> key2:i1:w [color=red];", "pygraphviz AGraph object.\"\"\" with open(path, 'w') as handle: handle.write(g.to_string()) def", "= \"{<f0> %s|<f1> %s}\" % (envo, text) for edge in", "http://orange-network.readthedocs.org/en/latest/ To install: $ pip install orange-network \"\"\" return self.orange_obo.to_network()", "subgraph that contains them all and all their ancestors, up", "text.replace(' ','\\\\n') node.attr['name'] = '' node.attr['shape'] = 'Mrecord' node.attr['style'] =", "this section --------------------------- # # descends def descends(self, e, root):", "@property_cached def pygraphviz(self): \"\"\"The network converted to `pygraphviz` format. *", "Outputs a networkx DiGraph object.\"\"\" # Testing mode # if", "%s|<f1> %s}\" % (envo, text) for edge in g.edges(): if", "a bit how the different libraries work.\"\"\" # Test node", "the graph (network with nodes and edges) of the ENVO", "if edge.attr['label'] == 'located_in': edge.attr['color'] = 'turquoise4' edge.attr['label'] = ''", "= g.get_node(envo) weight = weights[envo] color = matplotlib.colors.rgb2hex((1.0, 1.0 -", "subgraph cluster_01 { label = \"NB: darker nodes weigh more\";", "legend_txt = \"\"\" digraph { rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01", "a path to a dot file.\"\"\" sh.dot(in_path, '-Tpdf', '-o', out_path)", "pygraphviz # networkx @property_cached def orange_obo(self): \"\"\"The ontology loaded by", "--------------------------- # # print_test # draw_with_networkx # draw_with_pygraphviz def print_test(self,", "for e in envos for n in networkx.descendants(self.networkx, e)) nodes.update(envos)", "* 
http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To install:", "different libraries work.\"\"\" # Test node # if e is", "e, root): \"\"\"Does the envo term `e` descend from the", "\" print self.networkx[e] print self.networkx.successors(e) print self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e)", "sh, networkx import matplotlib.colors # A list of envos to", "list(nodes) # Return # return self.networkx.subgraph(nodes) def add_weights(self, g, weights=None):", "\"\"\"The network converted to `networkx` format. Seems like it looses", "path): \"\"\"Input the path to a dot file.\"\"\" legend_txt =", "list of envos to help test this module # test_envos", "install pygraphviz \"\"\" g = self.orange_obo.to_graphviz() assert g.is_directed() assert g.is_strict()", "OBOOntology(self.path) @property_cached def goatools(self): \"\"\"The network loaded into goatools' format.", "\"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################ class", "g # --------------------------- In this section --------------------------- # # test", "g = self.orange_obo.to_graphviz() assert g.is_directed() assert g.is_strict() return g @property_cached", "property_cached # Third party modules # import sh, networkx import", "see a bit how the different libraries work.\"\"\" # Test", "[label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td", "and all their ancestors, up to the root. 
Outputs a", "print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) # Networkx # import networkx print", "key2:i3:w [color=turquoise4]; }\"\"\" orig_txt = [line.rstrip('\\n') for line in open(path,", "node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 { label = \"NB: darker nodes", "draw_to_pdf # write_to_dot def get_subgraph(self, envos=None): \"\"\"Given a list of", "of ENVO terms, get the subgraph that contains them all", "for node in g.nodes(): text = node.attr['name'] node.attr['label'] = text.replace('", "libraries work.\"\"\" # Test node # if e is None:", "* http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $ pip install pygraphviz \"\"\" g", "modules # from seqenv import module_dir from seqenv.common.cache import property_cached", "sh.dot(in_path, '-Tpdf', '-o', out_path) # --------------------------- In this section ---------------------------", "get_subgraph # add_weights # draw_to_pdf # write_to_dot def get_subgraph(self, envos=None):", "in networkx.ancestors(self.networkx, root) # --------------------------- In this section --------------------------- #", "'turquoise4' edge.attr['label'] = '' return g def write_to_dot(self, g, path):", "open(path, 'w') as handle: handle.write(g.to_string()) def add_legend(self, path): \"\"\"Input the", "goatools # orange_network # pygraphviz # networkx @property_cached def orange_obo(self):", "\"\"\" g = self.orange_obo.to_graphviz() assert g.is_directed() assert g.is_strict() return g", "all their ancestors, up to the root. Outputs a networkx", "[color=turquoise4]; }\"\"\" orig_txt = [line.rstrip('\\n') for line in open(path, 'r')", "Doesn't seem to work until they update PyPI. 
* https://bitbucket.org/biolab/orange-network/", "with open(path, 'w') as handle: handle.write(new_text) def draw_to_pdf(self, in_path, out_path):", "# # Internal modules # from seqenv import module_dir from", "Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology import OBOOntology return OBOOntology(self.path) @property_cached def", "== 'located_in': edge.attr['color'] = 'turquoise4' edge.attr['label'] = '' return g", "g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\" with open(path, 'w')", "in open(path, 'r') if line] new_text = [line.lstrip() for line", "'-o', out_path) # --------------------------- In this section --------------------------- # #", "print self.networkx.successors(e) print self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e) # same as", "work.\"\"\" # Test node # if e is None: e", "OBO file\"\"\" if path is None: path = module_dir +", "'\\n'.join(new_text + orig_txt[2:]) with open(path, 'w') as handle: handle.write(new_text) def", "networkx import matplotlib.colors # A list of envos to help", "True or False.\"\"\" # Auto conversion # if isinstance(e, int):", "1.0 - weight, 0.0)) node.attr['fillcolor'] = color return g def", "orange_network(self): \"\"\"The network converted to `orange network` format. 
Doesn't seem", "used here that could be added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph *", "section --------------------------- # # test # get_subgraph # add_weights #", "envos = test_envos # All nodes # nodes = set(n", "e) # almost as child_to_parents def draw_with_networkx(self, g, path): \"\"\"Input", "g def write_to_dot(self, g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\"", "Returns True or False.\"\"\" # Auto conversion # if isinstance(e,", "obo_parser return obo_parser.GODag(self.path) @property_cached def orange_network(self): \"\"\"The network converted to", "assert g.is_directed() assert g.is_strict() return g @property_cached def networkx(self): \"\"\"The", "for envo in weights: node = g.get_node(envo) weight = weights[envo]", "the ENVO ontology from the OBO file's path. Other libraries", "weight = weights[envo] color = matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0))", "Ontology(object): \"\"\"A object that gives you access to the graph", "each node, uncomment: #envo = node.attr['label'] #node.attr['label'] = \"{<f0> %s|<f1>", "= module_dir + 'data_envo/envo.obo' self.path = path # --------------------------- In", "\"\"\"Give the path to the OBO file\"\"\" if path is", "cluster_01 { label = \"NB: darker nodes weigh more\"; key", "nodes and edges) of the ENVO ontology from the OBO", "# test # get_subgraph # add_weights # draw_to_pdf # write_to_dot", "def get_subgraph(self, envos=None): \"\"\"Given a list of ENVO terms, get", "<tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\">", "in weights: node = g.get_node(envo) weight = weights[envo] color =", "http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot \"\"\" def __init__(self, path=None): \"\"\"Give the", "</table>>]; key:i1:e -> key2:i1:w [color=red]; key:i2:e -> key2:i2:w [color=blue]; key:i3:e", "= '' 
node.attr['shape'] = 'Mrecord' node.attr['style'] = 'filled' # To", "the path to a dot file.\"\"\" legend_txt = \"\"\" digraph", "section --------------------------- # # print_test # draw_with_networkx # draw_with_pygraphviz def", "In this section --------------------------- # # orange_obo # goatools #", "return e in networkx.ancestors(self.networkx, root) # --------------------------- In this section", "into goatools' format. * https://github.com/tanghaibao/goatools To install: $ pip install", "in legend_txt.split('\\n') if line] new_text = '\\n'.join(new_text + orig_txt[2:]) with", "# --------------------------- In this section --------------------------- # # print_test #", "a list of ENVO terms, get the subgraph that contains", "node.attr['name'] = '' node.attr['shape'] = 'Mrecord' node.attr['style'] = 'filled' #", "-> key2:i3:w [color=turquoise4]; }\"\"\" orig_txt = [line.rstrip('\\n') for line in", "return obo_parser.GODag(self.path) @property_cached def orange_network(self): \"\"\"The network converted to `orange", "update PyPI. * https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To install: $ pip", "import property_cached # Third party modules # import sh, networkx", "new_text = [line.lstrip() for line in legend_txt.split('\\n') if line] new_text", "# import networkx print \"networkx: \" print self.networkx[e] print self.networkx.successors(e)", "here that could be added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot:", "goatools(self): \"\"\"The network loaded into goatools' format. * https://github.com/tanghaibao/goatools To", "object.\"\"\" # Testing mode # if envos is None: envos", "color return g def add_style(self, g): \"\"\"Input a pygraphviz AGraph", "object.\"\"\" for node in g.nodes(): text = node.attr['name'] node.attr['label'] =", "ontology from the OBO file's path. 
Other libraries not used", "the OBO file\"\"\" if path is None: path = module_dir", "orange_obo(self): \"\"\"The ontology loaded by the `orange` library. * http://orange.biolab.si", "return self.networkx.subgraph(nodes) def add_weights(self, g, weights=None): \"\"\"Input a networkx DiGraph", "[line.rstrip('\\n') for line in open(path, 'r') if line] new_text =", "install Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology import OBOOntology return OBOOntology(self.path) @property_cached", "weights is None: return g for envo in weights: node", "Internal modules # from seqenv import module_dir from seqenv.common.cache import", "party modules # import sh, networkx import matplotlib.colors # A", "self.orange_obo.to_network() @property_cached def pygraphviz(self): \"\"\"The network converted to `pygraphviz` format.", "predecessors print networkx.descendants(self.networkx, e) # almost as child_to_parents def draw_with_networkx(self,", "envos for n in networkx.descendants(self.networkx, e)) nodes.update(envos) nodes = list(nodes)", "To add the envo id to each node, uncomment: #envo", "None: path = module_dir + 'data_envo/envo.obo' self.path = path #", "[color=blue]; key:i3:e -> key2:i3:w [color=turquoise4]; }\"\"\" orig_txt = [line.rstrip('\\n') for", "def add_style(self, g): \"\"\"Input a pygraphviz AGraph object. Outputs a", "-> key2:i1:w [color=red]; key:i2:e -> key2:i2:w [color=blue]; key:i3:e -> key2:i3:w", "# draw_with_pygraphviz def print_test(self, e=None): \"\"\"Just a method to see", "align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2", "root) # --------------------------- In this section --------------------------- # # print_test", "by the `orange` library. * http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio", "like it looses directionality. 
* https://networkx.readthedocs.org/en/stable/ To install: $ pip", "to the graph (network with nodes and edges) of the", "from matplotlib import pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self, g,", "directionality. * https://networkx.readthedocs.org/en/stable/ To install: $ pip install networkx \"\"\"", "converted to `orange network` format. Doesn't seem to work until", "in networkx.descendants(self.networkx, e)) nodes.update(envos) nodes = list(nodes) # Return #", "object. Outputs a pygraphviz AGraph object.\"\"\" for node in g.nodes():", "networkx DiGraph object.\"\"\" from matplotlib import pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close()", "looses directionality. * https://networkx.readthedocs.org/en/stable/ To install: $ pip install networkx", "= self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return g # --------------------------- In this", "to the root. Outputs a networkx DiGraph object.\"\"\" # Testing", "'' return g def write_to_dot(self, g, path): \"\"\"Input a pygraphviz", "\"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################ class Ontology(object): \"\"\"A", "a pygraphviz AGraph object.\"\"\" for node in g.nodes(): text =", "\"\"\" from goatools import obo_parser return obo_parser.GODag(self.path) @property_cached def orange_network(self):", "a method to see a bit how the different libraries", "* https://networkx.readthedocs.org/en/stable/ To install: $ pip install networkx \"\"\" g", "# print \"pygraphviz: \" print self.pygraphviz[e] print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e)", "import OBOOntology return OBOOntology(self.path) @property_cached def goatools(self): \"\"\"The network loaded", "if weights is None: return g for envo in weights:", "line in open(path, 'r') if line] new_text = [line.lstrip() for", 
"[label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\"", "<tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>]; key:i1:e -> key2:i1:w", "https://bitbucket.org/biolab/orange-bioinformatics To install: $ pip install Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology", "as handle: handle.write(new_text) def draw_to_pdf(self, in_path, out_path): \"\"\"Input a path", "pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self, g, path): \"\"\"Input a pygraphviz AGraph", "orig_txt[2:]) with open(path, 'w') as handle: handle.write(new_text) def draw_to_pdf(self, in_path,", "To install: $ pip install goatools \"\"\" from goatools import", "g = networkx.nx_agraph.to_agraph(g) if weights is None: return g for", "}\"\"\" orig_txt = [line.rstrip('\\n') for line in open(path, 'r') if", "def draw_with_networkx(self, g, path): \"\"\"Input a networkx DiGraph object.\"\"\" from", "def add_legend(self, path): \"\"\"Input the path to a dot file.\"\"\"", "self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e) # same as predecessors print networkx.descendants(self.networkx,", "term `e` descend from the node `root`? Returns True or", "def orange_network(self): \"\"\"The network converted to `orange network` format. Doesn't", "0.0)) node.attr['fillcolor'] = color return g def add_style(self, g): \"\"\"Input", "added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot \"\"\" def __init__(self,", "`pygraphviz` format. * http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $ pip install pygraphviz", "path # --------------------------- In this section --------------------------- # # orange_obo", "the root. Outputs a networkx DiGraph object.\"\"\" # Testing mode", "the envo term `e` descend from the node `root`? 
Returns", "In this section --------------------------- # # test # get_subgraph #", "Goa # print \"Goa: \" print self.goatools[e] # Pygraphviz #", "same as predecessors print networkx.descendants(self.networkx, e) # almost as child_to_parents", "To install: $ pip install Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology import", "draw_with_pygraphviz def print_test(self, e=None): \"\"\"Just a method to see a", "# --------------------------- In this section --------------------------- # # descends def", "g, weights=None): \"\"\"Input a networkx DiGraph object. Outputs a pygraphviz", "Built-in modules # # Internal modules # from seqenv import", "get_subgraph(self, envos=None): \"\"\"Given a list of ENVO terms, get the", "`root`? Returns True or False.\"\"\" # Auto conversion # if", "the path to the OBO file\"\"\" if path is None:", "A list of envos to help test this module #", "that contains them all and all their ancestors, up to", "# orange_obo # goatools # orange_network # pygraphviz # networkx", "= set(n for e in envos for n in networkx.descendants(self.networkx,", "\"\"\" g = self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return g # ---------------------------", "'located_in': edge.attr['color'] = 'turquoise4' edge.attr['label'] = '' return g def", "g.get_node(envo) weight = weights[envo] color = matplotlib.colors.rgb2hex((1.0, 1.0 - weight,", "file\"\"\" if path is None: path = module_dir + 'data_envo/envo.obo'", "contains them all and all their ancestors, up to the", "self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) # Networkx # import networkx", "draw_with_pygraphviz(self, g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\" with open(path,", "terms, get the subgraph that contains them all and all", "Pygraphviz # print \"pygraphviz: \" print self.pygraphviz[e] print self.pygraphviz.successors(e) print", "key:i1:e -> key2:i1:w [color=red]; key:i2:e -> key2:i2:w 
[color=blue]; key:i3:e ->", "To install: $ pip install orange-network \"\"\" return self.orange_obo.to_network() @property_cached", "--------------------------- # # orange_obo # goatools # orange_network # pygraphviz", "pygraphviz AGraph object.\"\"\" for node in g.nodes(): text = node.attr['name']", "install: $ pip install orange-network \"\"\" return self.orange_obo.to_network() @property_cached def", "self.orange_obo.to_graphviz() assert g.is_directed() assert g.is_strict() return g @property_cached def networkx(self):", "open(path, 'w') as handle: handle.write(new_text) def draw_to_pdf(self, in_path, out_path): \"\"\"Input", "def draw_to_pdf(self, in_path, out_path): \"\"\"Input a path to a dot", "* http://orange-network.readthedocs.org/en/latest/ To install: $ pip install orange-network \"\"\" return", "pygraphviz(self): \"\"\"The network converted to `pygraphviz` format. * http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To", "nodes # nodes = set(n for e in envos for", "print networkx.descendants(self.networkx, e) # almost as child_to_parents def draw_with_networkx(self, g,", "pip install Orange-Bioinformatics \"\"\" from orangecontrib.bio.ontology import OBOOntology return OBOOntology(self.path)", "all and all their ancestors, up to the root. Outputs", "(network with nodes and edges) of the ENVO ontology from", "# --------------------------- In this section --------------------------- # # test #", "% root # Return # return e in networkx.ancestors(self.networkx, root)", "http://orange.biolab.si * http://orange-bioinformatics.readthedocs.org/en/latest/ * https://github.com/biolab/orange-bio * https://bitbucket.org/biolab/orange-bioinformatics To install: $", "descend from the node `root`? 
Returns True or False.\"\"\" #", "port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table", "key2 [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr>", "child_to_parents def draw_with_networkx(self, g, path): \"\"\"Input a networkx DiGraph object.\"\"\"", "\"\"\"Just a method to see a bit how the different", "node # if e is None: e = test_envos[0] #", "# Auto conversion # if isinstance(e, int): e = \"ENVO:%08d\"", "class Ontology(object): \"\"\"A object that gives you access to the", "= node.attr['label'] #node.attr['label'] = \"{<f0> %s|<f1> %s}\" % (envo, text)", "access to the graph (network with nodes and edges) of", "'Mrecord' node.attr['style'] = 'filled' # To add the envo id", "goatools' format. * https://github.com/tanghaibao/goatools To install: $ pip install goatools", "= \"ENVO:%08d\" % root # Return # return e in", "the node `root`? Returns True or False.\"\"\" # Auto conversion", "from seqenv.common.cache import property_cached # Third party modules # import", "a pygraphviz AGraph object.\"\"\" with open(path, 'w') as handle: handle.write(g.to_string())", "networkx @property_cached def orange_obo(self): \"\"\"The ontology loaded by the `orange`", "path to the OBO file\"\"\" if path is None: path", "digraph { rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 { label =", "- weight, 0.0)) node.attr['fillcolor'] = color return g def add_style(self,", "as handle: handle.write(g.to_string()) def add_legend(self, path): \"\"\"Input the path to", "bit how the different libraries work.\"\"\" # Test node #", "or False.\"\"\" # Auto conversion # if isinstance(e, int): e", "isinstance(root, int): root = \"ENVO:%08d\" % root # Return #", "add_style(self, g): \"\"\"Input a pygraphviz AGraph object. 
Outputs a pygraphviz", "= test_envos # All nodes # nodes = set(n for", "AGraph object.\"\"\" with open(path, 'w') as handle: handle.write(g.to_string()) def add_legend(self,", "orange-network \"\"\" return self.orange_obo.to_network() @property_cached def pygraphviz(self): \"\"\"The network converted", "= 'filled' # To add the envo id to each", "networkx DiGraph object.\"\"\" # Testing mode # if envos is", "False.\"\"\" # Auto conversion # if isinstance(e, int): e =", "install: $ pip install networkx \"\"\" g = self.orange_obo.to_networkx() assert", "path = module_dir + 'data_envo/envo.obo' self.path = path # ---------------------------", "more\"; key [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr>", "\"pygraphviz: \" print self.pygraphviz[e] print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e)", "rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 { label = \"NB: darker", "be added: * graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot \"\"\" def", "set(n for e in envos for n in networkx.descendants(self.networkx, e))", "# # descends def descends(self, e, root): \"\"\"Does the envo", "networkx.descendants(self.networkx, e)) nodes.update(envos) nodes = list(nodes) # Return # return", "self.networkx.successors(e) print self.networkx.predecessors(e) print networkx.ancestors(self.networkx, e) # same as predecessors", "None: envos = test_envos # All nodes # nodes =", "out_path) # --------------------------- In this section --------------------------- # # descends", "section --------------------------- # # descends def descends(self, e, root): \"\"\"Does", "print_test # draw_with_networkx # draw_with_pygraphviz def print_test(self, e=None): \"\"\"Just a", "networkx.draw(g) pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self, g, path): 
\"\"\"Input a pygraphviz", "# pygraphviz # networkx @property_cached def orange_obo(self): \"\"\"The ontology loaded", "if isinstance(e, int): e = \"ENVO:%08d\" % e if isinstance(root,", "# A list of envos to help test this module", "PyPI. * https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To install: $ pip install", "def print_test(self, e=None): \"\"\"Just a method to see a bit", "In this section --------------------------- # # print_test # draw_with_networkx #", "a dot file.\"\"\" sh.dot(in_path, '-Tpdf', '-o', out_path) # --------------------------- In", "OBOOntology return OBOOntology(self.path) @property_cached def goatools(self): \"\"\"The network loaded into", "install goatools \"\"\" from goatools import obo_parser return obo_parser.GODag(self.path) @property_cached", "# All nodes # nodes = set(n for e in", "darker nodes weigh more\"; key [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\">", "align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td", "#envo = node.attr['label'] #node.attr['label'] = \"{<f0> %s|<f1> %s}\" % (envo,", "networkx \"\"\" g = self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return g #", "'data_envo/envo.obo' self.path = path # --------------------------- In this section ---------------------------", "\" print self.pygraphviz[e] print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) #", "to see a bit how the different libraries work.\"\"\" #", "self.goatools[e] # Pygraphviz # print \"pygraphviz: \" print self.pygraphviz[e] print", "this module # test_envos = [ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\",", "def add_weights(self, g, weights=None): \"\"\"Input a networkx DiGraph object. Outputs", "root. 
Outputs a networkx DiGraph object.\"\"\" # Testing mode #", "+ orig_txt[2:]) with open(path, 'w') as handle: handle.write(new_text) def draw_to_pdf(self,", "in_path, out_path): \"\"\"Input a path to a dot file.\"\"\" sh.dot(in_path,", "draw_with_networkx # draw_with_pygraphviz def print_test(self, e=None): \"\"\"Just a method to", "-> key2:i2:w [color=blue]; key:i3:e -> key2:i3:w [color=turquoise4]; }\"\"\" orig_txt =", "file's path. Other libraries not used here that could be", "@property_cached def goatools(self): \"\"\"The network loaded into goatools' format. *", "print \"pygraphviz: \" print self.pygraphviz[e] print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print", "to work until they update PyPI. * https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/", "draw_with_networkx(self, g, path): \"\"\"Input a networkx DiGraph object.\"\"\" from matplotlib", "align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table border=\"0\" cellpadding=\"2\"", "[ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ] ################################################################################", "the OBO file's path. Other libraries not used here that", "open(path, 'r') if line] new_text = [line.lstrip() for line in", "\"\"\"A object that gives you access to the graph (network", "loaded into goatools' format. 
* https://github.com/tanghaibao/goatools To install: $ pip", "graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot \"\"\" def __init__(self, path=None): \"\"\"Give", "= [ \"ENVO:00000033\", \"ENVO:00000043\", \"ENVO:00000067\", \"ENVO:00000143\", \"ENVO:00000210\", \"ENVO:00000215\", \"ENVO:00000475\", ]", "node.attr['style'] = 'filled' # To add the envo id to", "AGraph object.\"\"\" g = networkx.nx_agraph.to_agraph(g) if weights is None: return", "= \"\"\" digraph { rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 {", "install: $ pip install goatools \"\"\" from goatools import obo_parser", "root = \"ENVO:%08d\" % root # Return # return e", "pyplot.close() def draw_with_pygraphviz(self, g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\"", "pip install goatools \"\"\" from goatools import obo_parser return obo_parser.GODag(self.path)", "','\\\\n') node.attr['name'] = '' node.attr['shape'] = 'Mrecord' node.attr['style'] = 'filled'", "def orange_obo(self): \"\"\"The ontology loaded by the `orange` library. *", "pip install pygraphviz \"\"\" g = self.orange_obo.to_graphviz() assert g.is_directed() assert", "\"\"\"The ontology loaded by the `orange` library. * http://orange.biolab.si *", "% e if isinstance(root, int): root = \"ENVO:%08d\" % root", "until they update PyPI. * https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To install:", "install: $ pip install pygraphviz \"\"\" g = self.orange_obo.to_graphviz() assert", "$ pip install orange-network \"\"\" return self.orange_obo.to_network() @property_cached def pygraphviz(self):", "orig_txt = [line.rstrip('\\n') for line in open(path, 'r') if line]", "seqenv.common.cache import property_cached # Third party modules # import sh,", "they update PyPI. 
* https://bitbucket.org/biolab/orange-network/ * http://orange-network.readthedocs.org/en/latest/ To install: $", "return g # --------------------------- In this section --------------------------- # #", "int): e = \"ENVO:%08d\" % e if isinstance(root, int): root", "# Pygraphviz # print \"pygraphviz: \" print self.pygraphviz[e] print self.pygraphviz.successors(e)", "envo id to each node, uncomment: #envo = node.attr['label'] #node.attr['label']", "pip install networkx \"\"\" g = self.orange_obo.to_networkx() assert networkx.is_directed_acyclic_graph(g) return", "Auto conversion # if isinstance(e, int): e = \"ENVO:%08d\" %", "for line in legend_txt.split('\\n') if line] new_text = '\\n'.join(new_text +", "root # Return # return e in networkx.ancestors(self.networkx, root) #", "of the ENVO ontology from the OBO file's path. Other", "a networkx DiGraph object. Outputs a pygraphviz AGraph object.\"\"\" g", "--------------------------- # # descends def descends(self, e, root): \"\"\"Does the", "# Built-in modules # # Internal modules # from seqenv", "* https://github.com/tanghaibao/goatools To install: $ pip install goatools \"\"\" from", "[color=red]; key:i2:e -> key2:i2:w [color=blue]; key:i3:e -> key2:i3:w [color=turquoise4]; }\"\"\"", "self.pygraphviz[e] print self.pygraphviz.successors(e) print self.pygraphviz.predecessors(e) print self.pygraphviz.get_node(e) # Networkx #", "node.attr['shape'] = 'Mrecord' node.attr['style'] = 'filled' # To add the", "text = node.attr['name'] node.attr['label'] = text.replace(' ','\\\\n') node.attr['name'] = ''", "file.\"\"\" sh.dot(in_path, '-Tpdf', '-o', out_path) # --------------------------- In this section", "# goatools # orange_network # pygraphviz # networkx @property_cached def", "\"\"\" digraph { rankdir=LR node [shape=plaintext,fontname=\"helvetica\"] subgraph cluster_01 { label", "@property_cached def orange_obo(self): \"\"\"The ontology loaded by the `orange` library.", "def goatools(self): \"\"\"The network loaded into 
goatools' format. * https://github.com/tanghaibao/goatools", "cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\" port=\"i1\">Is</td></tr> <tr><td align=\"right\" port=\"i2\">Part</td></tr> <tr><td align=\"right\"", "# # print_test # draw_with_networkx # draw_with_pygraphviz def print_test(self, e=None):", "for line in open(path, 'r') if line] new_text = [line.lstrip()", "return g def add_style(self, g): \"\"\"Input a pygraphviz AGraph object.", "out_path): \"\"\"Input a path to a dot file.\"\"\" sh.dot(in_path, '-Tpdf',", "path to a dot file.\"\"\" sh.dot(in_path, '-Tpdf', '-o', out_path) #", "+ 'data_envo/envo.obo' self.path = path # --------------------------- In this section", "network converted to `pygraphviz` format. * http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $", "= weights[envo] color = matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0)) node.attr['fillcolor']", "add the envo id to each node, uncomment: #envo =", "if path is None: path = module_dir + 'data_envo/envo.obo' self.path", "* graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph * pydot: https://github.com/erocarrera/pydot \"\"\" def __init__(self, path=None):", "up to the root. 
Outputs a networkx DiGraph object.\"\"\" #", "color = matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0)) node.attr['fillcolor'] = color", "the envo id to each node, uncomment: #envo = node.attr['label']", "conversion # if isinstance(e, int): e = \"ENVO:%08d\" % e", "g for envo in weights: node = g.get_node(envo) weight =", "Return # return e in networkx.ancestors(self.networkx, root) # --------------------------- In", "port=\"i2\">Part</td></tr> <tr><td align=\"right\" port=\"i3\">Located</td></tr> </table>>]; key2 [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\"", "this section --------------------------- # # test # get_subgraph # add_weights", "handle.write(new_text) def draw_to_pdf(self, in_path, out_path): \"\"\"Input a path to a", "weigh more\"; key [label=<<table border=\"0\" cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td align=\"right\"", "= matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0)) node.attr['fillcolor'] = color return", "pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close() def draw_with_pygraphviz(self, g, path): \"\"\"Input a", "'' node.attr['shape'] = 'Mrecord' node.attr['style'] = 'filled' # To add", "def draw_with_pygraphviz(self, g, path): \"\"\"Input a pygraphviz AGraph object.\"\"\" with", "cellpadding=\"2\" cellspacing=\"0\" cellborder=\"0\"> <tr><td port=\"i1\">a</td></tr> <tr><td port=\"i2\">of</td></tr> <tr><td port=\"i3\">in</td></tr> </table>>];", "dot file.\"\"\" legend_txt = \"\"\" digraph { rankdir=LR node [shape=plaintext,fontname=\"helvetica\"]", "AGraph object. Outputs a pygraphviz AGraph object.\"\"\" for node in", "to `pygraphviz` format. 
* http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/ To install: $ pip install", "modules # import sh, networkx import matplotlib.colors # A list", "--------------------------- In this section --------------------------- # # orange_obo # goatools", "DiGraph object.\"\"\" from matplotlib import pyplot networkx.draw(g) pyplot.savefig(path) pyplot.close() def", "mode # if envos is None: envos = test_envos #" ]
[ "Possible values include: \"undefined\", \"false\", \"true\". :type result: str or", "to specify whether the fallback route is enabled. :type is_enabled:", "__init__( self, **kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys", "} def __init__( self, **kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs) self.name =", "\"true\". :type result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed result", "be either primary or secondary. The primary region is where", "'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys': {'key': 'excludeKeys', 'type': 'bool'}, } def", "'IotHubCapacity'}, } def __init__( self, **kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type", "ignored when sending a request. :ivar minimum: The minimum number", "before it is expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-", "list of operations and a URL link to get the", "endpoint for file upload. All required parameters must be populated", "= kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model): \"\"\"Name of Iot Hub type.", "{'key': 'body', 'type': 'str'}, 'app_properties': {'key': 'appProperties', 'type': '{str}'}, 'system_properties':", "be populated in order to send to Azure. :param name:", "= kwargs['route'] self.twin = kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result of", "the tags on an IoT Hub instance. :param tags: A", "will be ignored when sending a request. :ivar job_id: The", "but can be reordered. :type file_name_format: str :param batch_frequency_in_seconds: Time", "str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The number of provisioned IoT", "Storage endpoint for file upload. 
All required parameters must be", "\"\"\"Public representation of one of the locations where a resource", "value for the quota metric. :vartype current_value: long :ivar max_value:", ":vartype id: str :ivar name: The name of the certificate.", "__init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "'type': 'int'}, 'name': {'key': 'name', 'type': 'Name'}, } def __init__(", "used to serialize messages to blobs. Supported values are 'avro',", "For example, DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\",", "kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related to a storage", "resource. :vartype resource_type: str :param sku: Required. The type of", "that matched. :param properties: Properties of routes that matched. :type", "objects with a next link. Variables are only populated by", "str \"\"\" _validation = { 'partition_ids': {'readonly': True}, 'path': {'readonly':", "'properties', 'type': 'RouteProperties'}, } def __init__( self, **kwargs ): super(MatchedRoute,", "the resource group of the event hub endpoint. :type resource_group:", "reason for the failure. :vartype failure_reason: str :ivar status_message: The", "self.reported = kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of an", "} def __init__( self, **kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value =", "self.result = kwargs.get('result', None) self.details = kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model):", "hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. 
:type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The", "**kwargs ): super(RoutingTwin, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.properties =", ":vartype maximum: long :ivar default: The default number of units.", "ignored when sending a request. :ivar name: Operation name: {provider}/{resource}/{read", "str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location where the route error", "The name of the shared access policy. :type key_name: str", "between 60 and 720 seconds. Default value is 300 seconds.", "'str'}, 'message': {'key': 'message', 'type': 'str'}, 'details': {'key': 'details', 'type':", "name: The name of the certificate. :vartype name: str :ivar", ":type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The provisioning state. :vartype provisioning_state:", "request. :param value: The list of shared access policies. :type", "IotHub type. :type unit: str :param current_value: Current number of", "= kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing", "self.maximum = None self.default = None self.scale_type = None class", "{'key': 'maxChunkSizeInBytes', 'type': 'int'}, 'encoding': {'key': 'encoding', 'type': 'str'}, }", "or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition which is evaluated in", "by the server, and will be ignored when sending a", "file_name_format: File name format for the blob. Default format is", "long :param partition_count: The number of partitions for receiving device-to-cloud", "\"export\", \"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". 
:vartype", "'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, 'capacity': {'key': 'capacity', 'type': 'IotHubCapacity'},", "billing tier for the IoT hub. Possible values include: \"Free\",", "storage. Value should be between 10485760(10MB) and 524288000(500MB). Default value", "super(RouteErrorPosition, self).__init__(**kwargs) self.line = kwargs.get('line', None) self.column = kwargs.get('column', None)", "to serialize messages to blobs. Supported values are 'avro', 'avrodeflate',", "message for the job. :vartype status_message: str :ivar parent_job_id: The", "to send to Azure. :ivar resource_type: The type of the", "'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc',", "= { 'desired': {'key': 'desired', 'type': 'object'}, 'reported': {'key': 'reported',", "self.next_link = None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the SKU of", ":param enable_file_upload_notifications: If True, file upload notifications are enabled. :type", "authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The IP filter rules. :type ip_filter_rules:", "maximum: The maximum number of units. :vartype maximum: long :ivar", "_validation = { 'provisioning_state': {'readonly': True}, 'state': {'readonly': True}, 'host_name':", "'message': {'key': 'message', 'type': 'str'}, 'severity': {'key': 'severity', 'type': 'str'},", "{'required': True, 'min_items': 1}, } _attribute_map = { 'key': {'key':", "request. All required parameters must be populated in order to", "properties for the file upload notification queue. 
:type messaging_endpoints: dict[str,", "self.value = kwargs.get('value', None) self.localized_value = kwargs.get('localized_value', None) class Operation(msrest.serialization.Model):", "= kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None) self.resource_group", "All required parameters must be populated in order to send", "content. :type certificate: str \"\"\" _attribute_map = { 'certificate': {'key':", "condition are routed to. Currently only 1 endpoint is allowed.", "\"\"\" _validation = { 'connection_string': {'required': True}, 'container_name': {'required': True},", "for the retrial period. See IoT Hub metrics to identify", "rules. This list does not include the built-in Event Hubs", "None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None) self.encoding", "**kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "connection_string: str :param name: Required. The name that identifies this", "value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The next link. :vartype next_link: str", "The list of storage container endpoints that IoT hub routes", "or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation = { 'key_name': {'required': True}, 'rights':", "class IotHubDescription(Resource): \"\"\"The description of the IoT hub. Variables are", "'bool'}, 'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments': {'key': 'comments', 'type':", "X509 Certificate. :param certificate: base-64 representation of the X509 leaf", "the message. :type endpoint_names: list[str] \"\"\" _validation = { 'key':", "type. :vartype type: str :ivar etag: The etag. :vartype etag:", "updated: The certificate's last update date and time. 
:vartype updated:", "unique. :type name: str :param source: Required. The source that", "None class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array of Certificate objects. :param", "'event_hubs': {'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'},", "value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link to more results. :vartype next_link:", "= kwargs.get('id', None) self.type = kwargs.get('type', None) self.unit = kwargs.get('unit',", "kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number of", "queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count: int \"\"\" _validation =", "properties. :type desired: object :param reported: Twin desired properties. :type", "certificate has been verified. :vartype is_verified: bool :ivar created: The", "): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model): \"\"\"The", "be ignored when sending a request. :ivar total_device_count: The total", "self).__init__(**kwargs) self.result = kwargs.get('result', None) self.details = kwargs.get('details', None) class", "order to apply the fallback route. 
If the condition is", "the SAS URI generated by IoT Hub for file upload", "{'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, }", "'connectionString', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, } def", "'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, 'description': {'key':", "True}, } _attribute_map = { 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type': 'duration'},", "can only include alphanumeric characters, periods, underscores, hyphens, has a", "as DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\".", "{'key': 'value', 'type': '[Operation]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "def __init__( self, **kwargs ): super(TestRouteResult, self).__init__(**kwargs) self.result = kwargs.get('result',", "list IoT Hub operations. It contains a list of operations", "the enrichment property. :type value: str :param endpoint_names: Required. The", "= kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The", ":param value: Required. The value for the enrichment property. :type", "self.provisioning_state = None self.state = None self.host_name = None self.event_hub_endpoints", "None) class IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter rules for the IoT", "{ 'partition_ids': {'readonly': True}, 'path': {'readonly': True}, 'endpoint': {'readonly': True},", "'column': {'key': 'column', 'type': 'int'}, } def __init__( self, **kwargs", "\"\"\"The properties of the Azure Storage endpoint for file upload.", "{'key': 'rights', 'type': 'str'}, } def __init__( self, **kwargs ):", "super(CertificateDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None self.name", "and time. 
:vartype updated: ~datetime.datetime :param certificate: The certificate content.", "\"None\". :vartype scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation = {", "~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required. IotHub capacity. :type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\"", "all devices in the hub. All required parameters must be", "'severity', 'type': 'str'}, 'location': {'key': 'location', 'type': 'RouteErrorRange'}, } def", "'str'}, } def __init__( self, **kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider", "a request. :ivar name_available: The value which indicates whether the", "self.routing = kwargs.get('routing', None) self.storage_endpoints = kwargs.get('storage_endpoints', None) self.messaging_endpoints =", "): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None", "{'readonly': True}, 'etag': {'readonly': True}, } _attribute_map = { 'properties':", "} def __init__( self, **kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value =", "sending a request. :param properties: The tags. :type properties: dict[str,", "{ 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'name': {'key': 'name', 'type':", "update calls for the IoT hub. :type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties]", "{'maximum': 720, 'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum': 10485760}, }", "'message', 'type': 'RoutingMessage'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def", "sending a request. All required parameters must be populated in", "the endpoint is not accepting messages as expected and IoT", "routed to the built-in eventhub endpoint. 
:type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param", "super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id =", "self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device = kwargs.get('cloud_to_device', None) self.comments =", "{'key': 'details', 'type': 'str'}, } def __init__( self, **kwargs ):", "{'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value':", "**kwargs ): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues', None) self.service_bus_topics =", ":vartype expiry: ~datetime.datetime :ivar thumbprint: The certificate's thumbprint. :vartype thumbprint:", "= kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity", "_validation = { 'partition_ids': {'readonly': True}, 'path': {'readonly': True}, 'endpoint':", "kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model): \"\"\"The routing related properties of the", "storage account. :type resource_group: str :param container_name: Required. The name", "messages to, based on the routing rules. 
:type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties]", "'str'}, 'features': {'key': 'features', 'type': 'str'}, 'locations': {'key': 'locations', 'type':", "'app_properties': {'key': 'appProperties', 'type': '{str}'}, 'system_properties': {'key': 'systemProperties', 'type': '{str}'},", "= { 'endpoint_id': {'key': 'endpointId', 'type': 'str'}, 'health_status': {'key': 'healthStatus',", "{'readonly': True}, 'start_time_utc': {'readonly': True}, 'end_time_utc': {'readonly': True}, 'type': {'readonly':", ":type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The list of Event Hubs", "Event Hub-compatible endpoint used by the IoT hub. Variables are", "the condition is not provided it will evaluate to true", "= { 'value': {'key': 'value', 'type': '[JobResponse]'}, 'next_link': {'key': 'nextLink',", "type. :type value: str :param localized_value: Localized value of name.", "def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name = kwargs['key_name']", "kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A container holding only the Tags for", "as $default. Specifying more than one storage account causes an", "'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "description: str \"\"\" _validation = { 'provider': {'readonly': True}, 'resource':", "'value': {'key': 'value', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'},", "This is an optional parameter. 
:param tags: A set of", "or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location where the route error happened.", "'[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'state': {'key': 'state', 'type':", "The name of the route. The name can only include", "Possible values include: \"unknown\", \"healthy\", \"unhealthy\", \"dead\". :type health_status: str", "Required. Used to specify whether the fallback route is enabled.", "True}, 'tier': {'readonly': True}, } _attribute_map = { 'name': {'key':", "{'readonly': True}, } _attribute_map = { 'name': {'key': 'name', 'type':", "self.endpoint_names = kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error details. Variables are only", "max_chunk_size_in_bytes: int :param encoding: Encoding that is used to serialize", "thumbprint: The certificate's thumbprint. :vartype thumbprint: str :ivar is_verified: Determines", "The name of the host. :vartype host_name: str :param event_hub_endpoints:", "\"\"\"The description of an X509 CA Certificate including the challenge", "policies you can use to secure a connection to the", "= { 'result': {'key': 'result', 'type': 'str'}, 'details': {'key': 'details',", "to send to Azure. :param message: Routing message. :type message:", "key: Required. The key or name for the enrichment property.", "_attribute_map = { 'name': {'key': 'name', 'type': 'str'}, } def", "RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus topic endpoint types.", "'type': 'int'}, } def __init__( self, **kwargs ): super(RouteErrorPosition, self).__init__(**kwargs)", "= None self.name = None self.type = None self.etag =", "route when none of the conditions specified in the 'routes'", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. 
:type max_delivery_count: int :param default_ttl_as_iso8601: The default time", "reordered. :type file_name_format: str :param batch_frequency_in_seconds: Time interval at which", "'tags', 'type': '{str}'}, } def __init__( self, **kwargs ): super(TagsResource,", "subject name. :vartype subject: str :ivar expiry: The certificate's expiration", "and will be ignored when sending a request. :ivar name_available:", "self.name = kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of User", "sending a request. :ivar minimum: The minimum number of units.", "list results if there are any. :vartype next_link: str \"\"\"", "or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The number of provisioned IoT Hub", "self, **kwargs ): super(EnrichmentProperties, self).__init__(**kwargs) self.key = kwargs['key'] self.value =", ":param subscription_id: The subscription identifier of the event hub endpoint.", "'start_time_utc': {'key': 'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'rfc-1123'},", ":param message: The detailed reason message. :type message: str \"\"\"", ":param container_name: Required. 
The name of the root container where", ":param storage_containers: The list of storage container endpoints that IoT", "name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map = { 'id': {'key': 'id', 'type':", "'limit', 'type': 'int'}, 'name': {'key': 'name', 'type': 'Name'}, } def", "= None self.host_name = None self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None) self.routing", "'str'}, } def __init__( self, **kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value", "'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device': {'key': 'cloudToDevice',", "list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The next link. :vartype next_link: str \"\"\"", ":vartype partition_ids: list[str] :ivar path: The Event Hub-compatible name. :vartype", "of name. :type localized_value: str \"\"\" _attribute_map = { 'value':", "messages that satisfy the condition are routed to. Currently only", "a request. :param properties: The tags. :type properties: dict[str, str]", "been verified. :vartype is_verified: bool :ivar created: The certificate's create", "kwargs.get('max_chunk_size_in_bytes', None) self.encoding = kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference", "when sending a request. :ivar name: The name of the", "str :ivar operation: Name of the operation. :vartype operation: str", "self, **kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri =", "= kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of the feedback queue", "str :ivar type: the resource type. 
:vartype type: str :ivar", "'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, } def __init__(", "be updated to healthy when IoT Hub has established an", "_attribute_map = { 'tags': {'key': 'tags', 'type': 'object'}, 'properties': {'key':", "sending a request. :ivar value: List of IoT Hub operations", "of Event Hub-compatible consumer group names with a next link.", "super(IotHubDescription, self).__init__(**kwargs) self.etag = kwargs.get('etag', None) self.properties = kwargs.get('properties', None)", "a request. :param authorization_policies: The shared access policies you can", "name: Operation name: {provider}/{resource}/{read | write | action | delete}.", "'type': 'RoutingEndpoints'}, 'routes': {'key': 'routes', 'type': '[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute',", "'type': 'str'}, 'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey',", ":param routes: The list of user-provided routing rules that the", "description of an X509 CA Certificate including the challenge nonce", "CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of an X509 CA Certificate including the", "eventually consistent state of health. The 'dead' status shows that", "= None class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array of Certificate objects.", "'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, } def __init__( self, **kwargs", "The maximum value of the quota metric. :vartype max_value: long", "time the job stopped processing. :vartype end_time_utc: ~datetime.datetime :ivar type:", "status == failed, this string containing the reason for the", "This list does not include the built-in Event Hubs endpoint.", "be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from", "The list of Service Bus topic endpoints that the IoT", "'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'certificate': {'key': 'certificate',", "'type', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'failure_reason': {'key':", "None self.display = kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model): \"\"\"The object that", "{'key': 'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type': 'str'}, }", "{'key': 'etag', 'type': 'str'}, } def __init__( self, **kwargs ):", "seconds. :type batch_frequency_in_seconds: int :param max_chunk_size_in_bytes: Maximum number of bytes", "for the failure. :vartype failure_reason: str :ivar status_message: The status", "= { 'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink',", "period of time for which the SAS URI generated by", "\"\"\" _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'type':", "{'key': 'encoding', 'type': 'str'}, } def __init__( self, **kwargs ):", "} def __init__( self, **kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source =", ":type severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location where the", "devices in the identity registry. :vartype enabled_device_count: long :ivar disabled_device_count:", "def __init__( self, **kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties',", "of the EventHubConsumerGroupInfo object. 
Variables are only populated by the", "**kwargs ): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.partition_count =", "True}, 'sku': {'required': True}, 'capacity': {'required': True}, } _attribute_map =", "'str'}, } def __init__( self, **kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available", "\"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead,", "'type': 'str'}, 'role': {'key': 'role', 'type': 'str'}, } def __init__(", "base-64 representation of the X509 leaf certificate .cer file or", "of the feedback queue for cloud-to-device messages. :type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties", "True}, } _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'},", "): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type = None self.sku = kwargs['sku'] self.capacity", "'endpoints', 'type': 'RoutingEndpoints'}, 'routes': {'key': 'routes', 'type': '[RouteProperties]'}, 'fallback_route': {'key':", "self.scale_type = None class Resource(msrest.serialization.Model): \"\"\"The common properties of an", ":vartype name: str :param display: The object that represents the", "'type': 'rfc-1123'}, 'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__(", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints: The properties related to the custom", "your IoT hub routes messages based on the routing rules.", "} def __init__( self, **kwargs ): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues =", "date and time. 
:vartype updated: ~datetime.datetime :param certificate: The certificate", "kwargs.get('ip_filter_rules', None) self.provisioning_state = None self.state = None self.host_name =", "= { 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'name': {'key': 'name',", "parameters are mandatory but can be reordered. :type file_name_format: str", "time. :vartype expiry: ~datetime.datetime :ivar thumbprint: The certificate's thumbprint. :vartype", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names: Required. The list of", "self.type = None self.location = kwargs['location'] self.tags = kwargs.get('tags', None)", "str :param properties: IotHub properties. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku:", "r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True}, } _attribute_map =", "sending a request. :ivar subject: The certificate's subject name. :vartype", "**kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available = None self.reason = None", "class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched. :param properties: Properties of routes", ":param tags: A set of tags. Resource tags. :type tags:", "results if there are any. :vartype next_link: str \"\"\" _validation", "= kwargs.get('properties', None) self.id = None self.name = None self.etag", "to be present in the dictionary while making create or", "of an unhealthy endpoint will be updated to healthy when", ":vartype updated: ~datetime.datetime :param certificate: The certificate content. :type certificate:", "The array of Certificate objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map", "name: str :ivar etag: The entity tag. :vartype etag: str", "resource group of the event hub endpoint. 
:type resource_group: str", "'type': 'str'}, 'endpoint': {'key': 'endpoint', 'type': 'str'}, } def __init__(", "'type': 'str'}, } def __init__( self, **kwargs ): super(EventHubProperties, self).__init__(**kwargs)", "resource_type: str :param sku: Required. The type of the resource.", "message: Route error message. :type message: str :param severity: Severity", "= kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related to the custom", "resource type. :vartype type: str :param location: Required. The resource", "= { 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True},", "user-provided routing rules that the IoT hub uses to route", "operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation = { 'name': {'readonly':", "the conditions specified in the 'routes' section get routed to", "self).__init__(**kwargs) self.value = None self.next_link = None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity", "self.current_value = None self.max_value = None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized", "{ 'next_link': {'readonly': True}, } _attribute_map = { 'value': {'key':", "URL to get the next set of operation list results", "The status of an unhealthy endpoint will be updated to", "'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key':", "str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition that is evaluated", "\"\"\"RoutingTwinProperties. :param desired: Twin desired properties. :type desired: object :param", "one route. :param result: Result of testing route. Possible values", "sending a request. :param value: JSON-serialized array of Endpoint health", "{'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, }", "order to send to Azure. 
:ivar resource_type: The type of", "The certificate's create date and time. :vartype created: ~datetime.datetime :ivar", "to endpoints. All required parameters must be populated in order", "'type': '[RouteCompilationError]'}, } def __init__( self, **kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs)", "__init__( self, **kwargs ): super(FailoverInput, self).__init__(**kwargs) self.failover_region = kwargs['failover_region'] class", "failover_region: str \"\"\" _validation = { 'failover_region': {'required': True}, }", "{ 'value': {'key': 'value', 'type': '[JobResponse]'}, 'next_link': {'key': 'nextLink', 'type':", "such as DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\",", "'long'}, } def __init__( self, **kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name", ":type name: str :param source: Required. The source to which", "encoding: Encoding that is used to serialize messages to blobs.", "{'key': 'operation', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, }", ":param partition_count: The number of partitions for receiving device-to-cloud messages", "message. :type message: str \"\"\" _validation = { 'name_available': {'readonly':", "= { 'value': {'key': 'value', 'type': '[CertificateDescription]'}, } def __init__(", "'type': 'RoutingMessage'}, 'route': {'key': 'route', 'type': 'RouteProperties'}, 'twin': {'key': 'twin',", "= { 'id': {'readonly': True}, 'name': {'readonly': True}, 'etag': {'readonly':", "group names with a next link. Variables are only populated", "'type': 'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing': {'key': 'routing',", "\"undefined\", \"false\", \"true\". 
:type result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details:", "{'key': 'isVerified', 'type': 'bool'}, 'created': {'key': 'created', 'type': 'rfc-1123'}, 'updated':", "characters, periods, underscores, hyphens and has a maximum length of", "endpoint: The Event Hub-compatible endpoint. :vartype endpoint: str \"\"\" _validation", "in the project root for license information. # Code generated", "'message': {'key': 'message', 'type': 'RoutingMessage'}, 'route': {'key': 'route', 'type': 'RouteProperties'},", "to send to Azure. :param name: Required. The name of", "account. :type resource_group: str :param container_name: Required. The name of", "'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location',", "None) self.next_link = None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of an", "{ 'key_name': {'required': True}, 'rights': {'required': True}, } _attribute_map =", "\"\"\"The properties related to a storage container endpoint. All required", "Hub for file upload is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type", "= None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubQuotaMetricInfo objects", "to send to Azure. :param filter_name: Required. The name of", ":type encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation = { 'connection_string':", "order to send to Azure. :param key_name: Required. The name", "StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the Azure Storage endpoint for file", "is an optional parameter. When this property is not set,", "bus topic endpoint types. All required parameters must be populated", "number of times the IoT hub attempts to deliver a", "hub. Possible values include: \"None\", \"DeviceManagement\". 
:type features: str or", ":param system_properties: System properties. :type system_properties: dict[str, str] \"\"\" _attribute_map", "): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues', None) self.service_bus_topics = kwargs.get('service_bus_topics',", "The object that represents the operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\"", "next_link: :vartype next_link: str \"\"\" _validation = { 'next_link': {'readonly':", ":type exclude_keys: bool \"\"\" _validation = { 'export_blob_container_uri': {'required': True},", "'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, 'is_enabled': {'key': 'isEnabled', 'type':", "shared access policies. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The next", "default_ttl_as_iso8601: The default time to live for cloud-to-device messages in", "Route error message. :type message: str :param severity: Severity of", "total_device_count: The total count of devices in the identity registry.", "properties indicating whether a given IoT hub name is available.", "class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of the EventHubConsumerGroupInfo object. Variables are", "__init__( self, **kwargs ): super(RoutingProperties, self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints', None)", "IoT hub is currently provisioned. The secondary region is the", "= kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use to provide", "kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity information.", "group identifier. 
:vartype id: str :ivar name: The Event Hub-compatible", "'type': 'CertificatePropertiesWithNonce'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name',", "None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched. :param properties: Properties of", "): super(JobResponseListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "str :param file_name_format: File name format for the blob. Default", "parameter. :param tags: A set of tags. Twin Tags. :type", "): super(FallbackRouteProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.source = kwargs['source']", "and custom endpoints. See: https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map", "an X509 CA Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The", "representation of one of the locations where a resource is", "name of the IoT hub to check. :type name: str", "and 720 seconds. Default value is 300 seconds. :type batch_frequency_in_seconds:", "'str'}, } def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value", "built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\"", "'type': 'rfc-1123'}, 'verification_code': {'key': 'verificationCode', 'type': 'str'}, 'certificate': {'key': 'certificate',", "str :param secondary_key: The secondary key. :type secondary_key: str :param", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count: int :ivar partition_ids: The partition ids", "values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". 
:type routing_source: str", "comments: str :param features: The capabilities and features enabled for", "Code generated by Microsoft (R) AutoRest Code Generator. # Changes", "rules for the IoT hub. All required parameters must be", "{ 'code': {'readonly': True}, 'http_status_code': {'readonly': True}, 'message': {'readonly': True},", "= { 'failover_region': {'key': 'failoverRegion', 'type': 'str'}, } def __init__(", "'type': 'long'}, 'scale_type': {'key': 'scaleType', 'type': 'str'}, } def __init__(", "get routed to the built-in eventhub endpoint. :type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties", "time for which the SAS URI generated by IoT Hub", "self, **kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "given IoT hub name is available. Variables are only populated", "objects. :param value: The array of Certificate objects. :type value:", "time of the job. :vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: The", "720 seconds. Default value is 300 seconds. :type batch_frequency_in_seconds: int", "'name': {'readonly': True}, 'etag': {'readonly': True}, 'type': {'readonly': True}, }", "not be the same as the actual topic name. :type", "{'key': 'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes':", "sending a request. :param value: List of consumer groups objects.", "\"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect,", "__init__( self, **kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count', None)", "if any. 
:vartype parent_job_id: str \"\"\" _validation = { 'job_id':", "def __init__( self, **kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name = None", "60}, 'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum': 10485760}, } _attribute_map = {", "= { 'id': {'readonly': True}, 'name': {'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'},", "challenge nonce issued for the Proof-Of-Possession flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce", "exclude_keys: Required. The value indicating whether keys should be excluded", "id: str :ivar name: The resource name. :vartype name: str", "Hubs endpoints that IoT hub routes messages to, based on", "which blobs are written to storage. Value should be between", "applies to messages delivered to endpoints. All required parameters must", "value which indicates whether the provided name is available. :vartype", ":ivar endpoint: The Event Hub-compatible endpoint. :vartype endpoint: str \"\"\"", "the Azure region. :type location: str :param role: The role", "'type': 'str'}, 'severity': {'key': 'severity', 'type': 'str'}, 'location': {'key': 'location',", "kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model): \"\"\"Name of", "} def __init__( self, **kwargs ): super(RouteErrorRange, self).__init__(**kwargs) self.start =", "kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model): \"\"\"The description of an X509 CA", "bus queue endpoint. :type resource_group: str \"\"\" _validation = {", ":type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition which", "routes messages to, based on the routing rules. :type storage_containers:", "\"factoryResetDevice\", \"firmwareUpdate\". 
:vartype type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The", ":ivar value: List of IoT Hub operations supported by the", "super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count = None self.enabled_device_count = None self.disabled_device_count =", "to. Possible values include: \"primary\", \"secondary\". :type role: str or", "def __init__( self, **kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs) self.name = kwargs.get('name',", "\"primary\", \"secondary\". :type role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map =", "self.max_delivery_count = kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters", "= { 'key_name': {'required': True}, 'rights': {'required': True}, } _attribute_map", "super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None)", ".pem file content. :type certificate: str \"\"\" _attribute_map = {", "long \"\"\" _validation = { 'name': {'required': True}, 'tier': {'readonly':", "details: str \"\"\" _validation = { 'code': {'readonly': True}, 'http_status_code':", "'scale_type': {'key': 'scaleType', 'type': 'str'}, } def __init__( self, **kwargs", "IoT hub routes the messages to, based on the routing", "= { 'key': {'required': True}, 'value': {'required': True}, 'endpoint_names': {'required':", "connection_string: Required. The connection string of the storage account. :type", "= { 'partition_ids': {'readonly': True}, 'path': {'readonly': True}, 'endpoint': {'readonly':", "'type': '{str}'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name',", "True}, 'verification_code': {'readonly': True}, 'certificate': {'readonly': True}, } _attribute_map =", "challenge nonce issued for the Proof-Of-Possession flow. 
Variables are only", "'name': {'key': 'name', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'},", "a request. :ivar minimum: The minimum number of units. :vartype", ":vartype job_id: str :ivar start_time_utc: The start time of the", "'expiry', 'type': 'rfc-1123'}, 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, 'is_verified': {'key':", "details. :vartype details: str \"\"\" _validation = { 'code': {'readonly':", "class FailoverInput(msrest.serialization.Model): \"\"\"Use to provide failover region when requesting manual", "partition ids in the Event Hub-compatible endpoint. :vartype partition_ids: list[str]", "populated in order to send to Azure. :param key: Required.", "self.verification_code = None self.certificate = None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized", "_attribute_map = { 'result': {'key': 'result', 'type': 'str'}, 'details': {'key':", "the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints: The properties related", "Hub-compatible endpoint. :vartype partition_ids: list[str] :ivar path: The Event Hub-compatible", "{'key': 'line', 'type': 'int'}, 'column': {'key': 'column', 'type': 'int'}, }", "None) self.partition_ids = None self.path = None self.endpoint = None", "ServiceConnect, DeviceConnect\". :type rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation =", "self.updated = None self.verification_code = None self.certificate = None class", ":param body: Body of routing message. :type body: str :param", "certificate content. :vartype certificate: str \"\"\" _validation = { 'subject':", "and secondary location for iot hub. 
:vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\"", ":vartype reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The detailed reason", "= { 'failover_region': {'required': True}, } _attribute_map = { 'failover_region':", "= kwargs.get('desired', None) self.reported = kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The", "'type': 'int'}, 'partition_ids': {'key': 'partitionIds', 'type': '[str]'}, 'path': {'key': 'path',", ":param filter_name: Required. The name of the IP filter rule.", "hub. Possible values include: \"Free\", \"Standard\", \"Basic\". :vartype tier: str", "messages to, based on the routing rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties]", "reserved: events, fileNotifications, $default. Endpoint names must be unique across", "__init__( self, **kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired = kwargs.get('desired', None)", "{ 'result': {'key': 'result', 'type': 'str'}, 'details': {'key': 'details', 'type':", "self, **kwargs ): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.partition_count", "kwargs.get('value', None) self.next_link = None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the", "the dictionary while making create or update calls for the", "resource type. :vartype type: str \"\"\" _validation = { 'id':", "across all endpoint types for free hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints", "60 and 720 seconds. Default value is 300 seconds. :type", "or name for the enrichment property. :type key: str :param", "self.id = None self.name = None self.type = None self.etag", "For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. 
:type condition: str :param endpoint_names: Required.", "next_link: str \"\"\" _validation = { 'next_link': {'readonly': True}, }", "None) self.fallback_route = kwargs.get('fallback_route', None) self.enrichments = kwargs.get('enrichments', None) class", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The next link. :vartype next_link:", "the identity registry. :vartype enabled_device_count: long :ivar disabled_device_count: The count", "None self.type = None self.location = kwargs['location'] self.tags = kwargs.get('tags',", "\"\"\" _validation = { 'filter_name': {'required': True}, 'action': {'required': True},", "It contains a list of operations and a URL link", "only the Tags for a resource, allowing the user to", "'type': 'RouteProperties'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def __init__(", "the EventHubConsumerGroupInfo object. Variables are only populated by the server,", "Default value is 314572800(300MB). :type max_chunk_size_in_bytes: int :param encoding: Encoding", "\"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly': True},", "{'readonly': True}, 'details': {'readonly': True}, } _attribute_map = { 'code':", "~datetime.datetime :param certificate: The certificate content. :type certificate: str \"\"\"", "Required. The name of storage container in the storage account.", "status code. :vartype http_status_code: str :ivar message: The error message.", "none of the conditions specified in the 'routes' section are", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days: long :param partition_count: The number of", "or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed result of testing route. 
:type", ":ivar name_available: The value which indicates whether the provided name", "= kwargs.get('features', None) self.locations = None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics", "} def __init__( self, **kwargs ): super(CertificateDescription, self).__init__(**kwargs) self.properties =", "{ 'value': {'key': 'value', 'type': '[CertificateDescription]'}, } def __init__( self,", "~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map = { 'location': {'key': 'location', 'type': 'str'},", "request. :param value: JSON-serialized array of Endpoint health data. :type", "self.secondary_key = kwargs.get('secondary_key', None) self.rights = kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The", "list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The next link. :vartype next_link: str \"\"\"", ":ivar description: Description of the operation. :vartype description: str \"\"\"", "{'readonly': True}, 'certificate': {'readonly': True}, } _attribute_map = { 'subject':", "'fallback_route': {'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments', 'type': '[EnrichmentProperties]'},", "} def __init__( self, **kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 =", "of testing route. Possible values include: \"undefined\", \"false\", \"true\". :type", "\"\"\" _attribute_map = { 'desired': {'key': 'desired', 'type': 'object'}, 'reported':", "'type': 'str'}, 'details': {'key': 'details', 'type': 'TestRouteResultDetails'}, } def __init__(", "are any. :vartype next_link: str \"\"\" _validation = { 'value':", "route error happened. :type line: int :param column: Column where", "524288000(500MB). Default value is 314572800(300MB). 
:type max_chunk_size_in_bytes: int :param encoding:", "self, **kwargs ): super(IpFilterRule, self).__init__(**kwargs) self.filter_name = kwargs['filter_name'] self.action =", "**kwargs ): super(IotHubDescription, self).__init__(**kwargs) self.etag = kwargs.get('etag', None) self.properties =", "'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, 'file_name_format': {'key': 'fileNameFormat', 'type':", "super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source', None) self.message = kwargs.get('message', None)", "'connection_string': {'required': True}, 'container_name': {'required': True}, } _attribute_map = {", "= kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity information. Variables are", "quota metric. :vartype name: str :ivar current_value: The current value", "self, **kwargs ): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies', None) self.ip_filter_rules", "**kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source', None) self.message =", "{'key': 'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count':", "a given IoT hub name is available. Variables are only", ":param primary_key: The primary key. :type primary_key: str :param secondary_key:", "consumer group name. :vartype name: str :ivar type: the resource", "object :param reported: Twin desired properties. :type reported: object \"\"\"", "assigned to the shared access policy. Possible values include: \"RegistryRead\",", "send to Azure. :param message: Routing message. 
:type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage", "{'key': 'connectionString', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, }", "'default': {'readonly': True}, 'scale_type': {'readonly': True}, } _attribute_map = {", "): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "= kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error details. Variables are only populated", "region, can be either primary or secondary. The primary region", ":param compilation_errors: JSON-serialized list of route compilation errors. :type compilation_errors:", "the event hub endpoint. :type resource_group: str \"\"\" _validation =", "'maxChunkSizeInBytes', 'type': 'int'}, 'encoding': {'key': 'encoding', 'type': 'str'}, } def", "= None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of the EventHubConsumerGroupInfo object.", "for testing route. All required parameters must be populated in", "= kwargs['filter_name'] self.action = kwargs['action'] self.ip_mask = kwargs['ip_mask'] class JobResponse(msrest.serialization.Model):", "\"\"\" _attribute_map = { 'line': {'key': 'line', 'type': 'int'}, 'column':", "container URI. :type output_blob_container_uri: str \"\"\" _validation = { 'input_blob_container_uri':", "Maximum number of bytes for each blob written to storage.", "'role': {'key': 'role', 'type': 'str'}, } def __init__( self, **kwargs", "self.name_available = None self.reason = None self.message = kwargs.get('message', None)", "None) self.app_properties = kwargs.get('app_properties', None) self.system_properties = kwargs.get('system_properties', None) class", "{'key': 'enrichments', 'type': '[EnrichmentProperties]'}, } def __init__( self, **kwargs ):", "which is evaluated in order to apply the fallback route.", "name: str :ivar type: The resource type. :vartype type: str", "and 524288000(500MB). 
Default value is 314572800(300MB). :type max_chunk_size_in_bytes: int :param", "used as a fall-back route when none of the conditions", "feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The", "'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs)", "properties of the Job Response object. Variables are only populated", "): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available = None self.reason = None self.message", "\"healthy\", \"unhealthy\", \"dead\". :type health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map", "{ 'filter_name': {'required': True}, 'action': {'required': True}, 'ip_mask': {'required': True},", "def __init__( self, **kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "True}, 'status': {'readonly': True}, 'failure_reason': {'readonly': True}, 'status_message': {'readonly': True},", "to, based on the routing rules. :type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param", "'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum': 10485760}, } _attribute_map = { 'connection_string':", "connection_string: str :param container_name: Required. The name of the root", "tags: A set of tags. The resource tags. :type tags:", "Certificate including the challenge nonce issued for the Proof-Of-Possession flow.", "event_hubs: The list of Event Hubs endpoints that IoT hub", "RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired: Twin desired properties. 
:type desired: object", "{'required': True}, } _attribute_map = { 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type':", "\"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\". :type rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\"", "kwargs['sku'] self.capacity = kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs)", "hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The", "True}, 'resource': {'readonly': True}, 'operation': {'readonly': True}, 'description': {'readonly': True},", "Used to specify whether a route is enabled. :type is_enabled:", "\"B1\", \"B2\", \"B3\". :type name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier:", "'display', 'type': 'OperationDisplay'}, } def __init__( self, **kwargs ): super(Operation,", "str :param exclude_keys: Required. The value indicating whether keys should", "the messages which do not meet any of the conditions", "== failed, this string containing the reason for the failure.", "of the X509 leaf certificate .cer file or just .pem", "{'key': 'containerName', 'type': 'str'}, 'file_name_format': {'key': 'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds':", "of the route that is used as a fall-back route", "value of the quota metric. :vartype max_value: long \"\"\" _validation", "of X509 certificate .cer file or just .pem file content.", ":ivar parent_job_id: The job identifier of the parent job, if", "policies with a next link. Variables are only populated by", "} def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name =", "endpoint_names: Required. 
The list of endpoints for which the enrichment", "\"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\". :type rights: str", "The resource type. :vartype type: str :param location: Required. The", "= { 'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules',", "{'required': True, 'max_items': 1, 'min_items': 1}, 'is_enabled': {'required': True}, }", "of the route. The name can only include alphanumeric characters,", "\"\"\" _validation = { 'partition_ids': {'readonly': True}, 'path': {'readonly': True},", "messaging#cloud-to-device-messages. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number of times", "characters, and must be unique. :type name: str :param source:", "{'readonly': True}, 'locations': {'readonly': True}, } _attribute_map = { 'authorization_policies':", "self, **kwargs ): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id', None) self.health_status", "endpoint will be updated to healthy when IoT Hub has", "and will be ignored when sending a request. :ivar minimum:", "'type': 'RoutingTwin'}, } def __init__( self, **kwargs ): super(TestRouteInput, self).__init__(**kwargs)", "'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, 'max_value': {'key': 'maxValue',", "properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The resource identifier. 
:vartype id: str", "'type': {'readonly': True}, 'etag': {'readonly': True}, } _attribute_map = {", "): super(EnrichmentProperties, self).__init__(**kwargs) self.key = kwargs['key'] self.value = kwargs['value'] self.endpoint_names", "'location': {'key': 'location', 'type': 'str'}, 'role': {'key': 'role', 'type': 'str'},", "'condition', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, 'is_enabled': {'key':", "= { 'line': {'key': 'line', 'type': 'int'}, 'column': {'key': 'column',", "SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of an IoT hub shared access policy.", "long :ivar default: The default number of units. :vartype default:", "custom endpoints. See: https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map =", "self.current_value = kwargs.get('current_value', None) self.limit = kwargs.get('limit', None) self.name =", "The key or name for the enrichment property. :type key:", "{ 'start': {'key': 'start', 'type': 'RouteErrorPosition'}, 'end': {'key': 'end', 'type':", "'created': {'key': 'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'},", "of shared access policies. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The", "shared access policy. Possible values include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\",", "str] :param system_properties: System properties. :type system_properties: dict[str, str] \"\"\"", "all devices in the IoT hub. All required parameters must", "= { 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'route': {'key': 'route',", "def __init__( self, **kwargs ): super(ErrorDetails, self).__init__(**kwargs) self.code = None", "__init__( self, **kwargs ): super(IotHubDescription, self).__init__(**kwargs) self.etag = kwargs.get('etag', None)", "comments. 
:type comments: str :param features: The capabilities and features", "of all devices in the IoT hub. All required parameters", "'str'}, 'ip_mask': {'key': 'ipMask', 'type': 'str'}, } def __init__( self,", "str \"\"\" _validation = { 'failover_region': {'required': True}, } _attribute_map", "super(JobResponse, self).__init__(**kwargs) self.job_id = None self.start_time_utc = None self.end_time_utc =", "= kwargs.get('storage_endpoints', None) self.messaging_endpoints = kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications',", "Detailed result of testing route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map", "the locations where a resource is provisioned. :param location: The", "Encoding that is used to serialize messages to blobs. Supported", "of the resource. :vartype resource_type: str :param sku: Required. The", "self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model):", "self.parent_job_id = None class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of JobResponse", "'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments': {'key': 'comments', 'type': 'str'},", "upload. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number of times", "True}, 'thumbprint': {'readonly': True}, 'is_verified': {'readonly': True}, 'created': {'readonly': True},", "to send to Azure. :param export_blob_container_uri: Required. The export blob", "hub. :type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The routing related", ":type routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing message. :type", "\"Manual\", \"None\". 
:vartype scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation =", "Hub units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity: long \"\"\" _validation =", "str :ivar state: The hub state. :vartype state: str :ivar", "objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The next link. :vartype", "error details. :vartype details: str \"\"\" _validation = { 'code':", "of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param", "self).__init__(**kwargs) self.name = kwargs['name'] self.source = kwargs['source'] self.condition = kwargs.get('condition',", "Azure. :param connection_string: Required. The connection string of the event", "endpoint. The name can only include alphanumeric characters, periods, underscores,", "The permissions assigned to the shared access policy. Possible values", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class IotHubSkuDescription(msrest.serialization.Model):", "= None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics. Variables are only", "unhealthy endpoint will be updated to healthy when IoT Hub", "self, **kwargs ): super(RouteProperties, self).__init__(**kwargs) self.name = kwargs['name'] self.source =", "{'key': 'systemProperties', 'type': '{str}'}, } def __init__( self, **kwargs ):", "'input_blob_container_uri': {'required': True}, 'output_blob_container_uri': {'required': True}, } _attribute_map = {", "'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__( self,", ":type line: int :param column: Column where the route error", "verified. 
:vartype is_verified: bool :ivar created: The certificate's create date", "'max_delivery_count': {'maximum': 100, 'minimum': 1}, } _attribute_map = { 'max_delivery_count':", "are 'avro', 'avrodeflate', and 'JSON'. Default value is 'avro'. Possible", "_attribute_map = { 'resource_type': {'key': 'resourceType', 'type': 'str'}, 'sku': {'key':", "str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed result of testing route.", "Proof-Of-Possession flow. Variables are only populated by the server, and", "of the resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required. IotHub", "'failover_region': {'key': 'failoverRegion', 'type': 'str'}, } def __init__( self, **kwargs", "'connectionString', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key':", "{'readonly': True}, 'end_time_utc': {'readonly': True}, 'type': {'readonly': True}, 'status': {'readonly':", "which do not meet any of the conditions specified in", "= None class Resource(msrest.serialization.Model): \"\"\"The common properties of an Azure", "request. :ivar name: The name of the quota metric. :vartype", "} _attribute_map = { 'key_name': {'key': 'keyName', 'type': 'str'}, 'primary_key':", "} _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name':", ":param certificate: base-64 representation of X509 certificate .cer file or", "IotHubQuotaMetricInfo objects with a next link. Variables are only populated", "'twin', 'type': 'RoutingTwin'}, } def __init__( self, **kwargs ): super(TestRouteInput,", "operation. :vartype description: str \"\"\" _validation = { 'provider': {'readonly':", "endpoint. 
:vartype partition_ids: list[str] :ivar path: The Event Hub-compatible name.", "None) self.comments = kwargs.get('comments', None) self.features = kwargs.get('features', None) self.locations", "str :param features: The capabilities and features enabled for the", "= None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting", "self).__init__(**kwargs) self.location = kwargs.get('location', None) self.role = kwargs.get('role', None) class", "Required. The name of the IP filter rule. :type filter_name:", "'role', 'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubLocationDescription,", "id: The Event Hub-compatible consumer group identifier. :vartype id: str", "{'key': 'resourceGroup', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, 'file_name_format':", "endpoint. :type subscription_id: str :param resource_group: The name of the", "None) self.rights = kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of shared", "{'key': 'endpoint', 'type': 'str'}, } def __init__( self, **kwargs ):", "is provided, it evaluates to true by default. For grammar,", "message on the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count:", "endpoint. Possible values include: \"unknown\", \"healthy\", \"unhealthy\", \"dead\". :type health_status:", "'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs)", "Description of the operation. :vartype description: str \"\"\" _validation =", "= None self.capacity = kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model): \"\"\"The IP", "\"Standard\", \"Basic\". 
:vartype tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The", "'type': 'str'}, } def __init__( self, **kwargs ): super(OperationInputs, self).__init__(**kwargs)", "'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag',", "list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL to get the next set of", "import HttpResponseError import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509 Certificate.", "'condition': {'key': 'condition', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'},", "'str'}, 'capacity': {'key': 'capacity', 'type': 'long'}, } def __init__( self,", "ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting an export of", "RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics. Variables are only populated by the", "SKU of the IoT hub. Variables are only populated by", ":ivar failure_reason: If status == failed, this string containing the", "): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes = kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input", "= kwargs.get('properties', None) self.id = None self.name = None self.type", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubDescriptionListResult,", "of the endpoint. 
:type endpoint_id: str :param health_status: Health statuses", "'description', 'type': 'str'}, } def __init__( self, **kwargs ): super(OperationDisplay,", "'nameAvailable', 'type': 'bool'}, 'reason': {'key': 'reason', 'type': 'str'}, 'message': {'key':", "self.endpoints = kwargs.get('endpoints', None) self.routes = kwargs.get('routes', None) self.fallback_route =", "default time to live for cloud-to-device messages in the device", "None) self.source = kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names =", "def __init__( self, **kwargs ): super(Resource, self).__init__(**kwargs) self.id = None", ":vartype operation: str :ivar description: Description of the operation. :vartype", "to route messages to endpoints. All required parameters must be", "= { 'body': {'key': 'body', 'type': 'str'}, 'app_properties': {'key': 'appProperties',", "self.message = kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model): \"\"\"The properties of an", "def __init__( self, **kwargs ): super(EnrichmentProperties, self).__init__(**kwargs) self.key = kwargs['key']", "str :ivar start_time_utc: The start time of the job. :vartype", "~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map = { 'start': {'key': 'start', 'type': 'RouteErrorPosition'},", "event hub endpoint. :type connection_string: str :param name: Required. The", "= None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of an enrichment that", "\"\"\"Quota metrics properties. Variables are only populated by the server,", "kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None) self.resource_group =", "The type of the job. 
Possible values include: \"unknown\", \"export\",", "is the Azure disaster recovery (DR) paired region and also", "'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string': {'key': 'connectionString', 'type': 'str'},", "'certificate': {'readonly': True}, } _attribute_map = { 'subject': {'key': 'subject',", "None) self.severity = kwargs.get('severity', None) self.location = kwargs.get('location', None) class", "this endpoint. The name can only include alphanumeric characters, periods,", "'rfc-1123'}, 'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__( self,", "include: \"unknown\", \"healthy\", \"unhealthy\", \"dead\". :type health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus", "'min_items': 1}, } _attribute_map = { 'key': {'key': 'key', 'type':", "shows that the IoT Hub has not established a connection", "localized_value: Localized value of name. :type localized_value: str \"\"\" _attribute_map", "5 routing rules are allowed for free hubs. :type routes:", "'httpStatusCode', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'details': {'key':", "order to send to Azure. :param sas_ttl_as_iso8601: The period of", "str :ivar http_status_code: The HTTP status code. :vartype http_status_code: str", "| write | action | delete}. :vartype name: str :param", "'updated': {'readonly': True}, } _attribute_map = { 'subject': {'key': 'subject',", "when sending a request. :param properties: The tags. :type properties:", "def __init__( self, **kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider = None", "~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map = { 'message': {'key': 'message', 'type': 'str'},", ":ivar type: The resource type. :vartype type: str \"\"\" _validation", "nonce issued for the Proof-Of-Possession flow. 
Variables are only populated", "resource, allowing the user to update the tags on an", "**kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type = None self.sku = kwargs['sku']", "events. This key has to be present in the dictionary", "): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri'] class", "'type': 'OperationDisplay'}, } def __init__( self, **kwargs ): super(Operation, self).__init__(**kwargs)", "'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, } def __init__(", "CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only populated by the", "location. :type location: str :param tags: A set of tags.", "~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation = { 'minimum': {'readonly': True, 'maximum': 1,", ":param default_ttl_as_iso8601: The default time to live for cloud-to-device messages", "{'readonly': True}, 'expiry': {'readonly': True}, 'thumbprint': {'readonly': True}, 'is_verified': {'readonly':", "'message': {'key': 'message', 'type': 'RoutingMessage'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'},", "} _attribute_map = { 'minimum': {'key': 'minimum', 'type': 'long'}, 'maximum':", "_attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'type': {'key':", "identity registry. 
:vartype disabled_device_count: long \"\"\" _validation = { 'total_device_count':", "= kwargs.get('app_properties', None) self.system_properties = kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model): \"\"\"The", "None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None) self.feedback = kwargs.get('feedback', None) class", "or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation = { 'connection_string': {'required': True}, 'name':", "\"\"\"Result of testing all routes. :param routes: JSON-serialized array of", "hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The list", "self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model):", "'{str}'}, } def __init__( self, **kwargs ): super(RoutingMessage, self).__init__(**kwargs) self.body", "'rfc-1123'}, 'verification_code': {'key': 'verificationCode', 'type': 'str'}, 'certificate': {'key': 'certificate', 'type':", "key_name: str :param primary_key: The primary key. :type primary_key: str", "'retentionTimeInDays', 'type': 'long'}, 'partition_count': {'key': 'partitionCount', 'type': 'int'}, 'partition_ids': {'key':", "value: List of IoT Hub operations supported by the Microsoft.Devices", "): super(FailoverInput, self).__init__(**kwargs) self.failover_region = kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties", "\"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\",", "{'maximum': 100, 'minimum': 1}, } _attribute_map = { 'max_delivery_count': {'key':", "array of EndpointHealthData objects with a next link. 
Variables are", "container in the storage account. :type container_name: str :param file_name_format:", "operation: Name of the operation. :vartype operation: str :ivar description:", "self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", ":vartype code: str :ivar http_status_code: The HTTP status code. :vartype", "= { 'retention_time_in_days': {'key': 'retentionTimeInDays', 'type': 'long'}, 'partition_count': {'key': 'partitionCount',", "'type': 'long'}, 'max_value': {'key': 'maxValue', 'type': 'long'}, } def __init__(", "twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation =", "{'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag':", "provider. :vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL to get the", "that identifies this endpoint. The name can only include alphanumeric", "about the SKU of the IoT hub. Variables are only", "If the condition is not provided it will evaluate to", "'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc', 'type':", "of times the IoT hub attempts to deliver a message", "Required. The name of the SKU. Possible values include: \"F1\",", "the IoT hub. :type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The IP", "$default. Endpoint names must be unique across endpoint types. The", "\"S1\", \"S2\", \"S3\", \"B1\", \"B2\", \"B3\". :type name: str or", "Azure. :param key: Required. The key or name for the", "on the routing rules. 
:type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The", "} def __init__( self, **kwargs ): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days =", "None) self.type = kwargs.get('type', None) self.unit = kwargs.get('unit', None) self.current_value", "\"\"\" _validation = { 'provider': {'readonly': True}, 'resource': {'readonly': True},", "Time interval at which blobs are written to storage. Value", "self, **kwargs ): super(TagsResource, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) class", "def __init__( self, **kwargs ): super(RouteErrorPosition, self).__init__(**kwargs) self.line = kwargs.get('line',", "properties related to a storage container endpoint. All required parameters", "{ 'tags': {'key': 'tags', 'type': 'object'}, 'properties': {'key': 'properties', 'type':", "or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If status == failed, this string", "'type': '[MatchedRoute]'}, } def __init__( self, **kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs)", "is 300 seconds. :type batch_frequency_in_seconds: int :param max_chunk_size_in_bytes: Maximum number", "kwargs.get('id', None) self.type = kwargs.get('type', None) self.unit = kwargs.get('unit', None)", "HTTP status code. :vartype http_status_code: str :ivar message: The error", "~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT hub comments. :type comments: str :param", "endpoint_id: str :param health_status: Health statuses have following meanings. The", "routing message. :type body: str :param app_properties: App properties. :type", "The tags. :type properties: dict[str, str] :ivar id: The Event", "description of an X509 CA Certificate. 
Variables are only populated", "'type': 'str'}, 'parent_job_id': {'key': 'parentJobId', 'type': 'str'}, } def __init__(", "{'required': True}, 'ip_mask': {'required': True}, } _attribute_map = { 'filter_name':", "**kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs) self.location = kwargs.get('location', None) self.role =", "None self.sku = kwargs['sku'] self.capacity = kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The", "def __init__( self, **kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes = kwargs.get('routes',", "} _attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificateProperties'}, 'id':", "'type': 'long'}, } def __init__( self, **kwargs ): super(RegistryStatistics, self).__init__(**kwargs)", "True, 'maximum': 1, 'minimum': 1}, 'maximum': {'readonly': True}, 'default': {'readonly':", "'type': {'readonly': True}, 'location': {'required': True}, } _attribute_map = {", "**kwargs ): super(RouteProperties, self).__init__(**kwargs) self.name = kwargs['name'] self.source = kwargs['source']", "types. :type name: str :param subscription_id: The subscription identifier of", "when sending a request. :param value: The array of quota", "'type': 'str'}, } def __init__( self, **kwargs ): super(JobResponseListResult, self).__init__(**kwargs)", "link. Variables are only populated by the server, and will", "queue endpoint. :type resource_group: str \"\"\" _validation = { 'connection_string':", "to True, causes an error to be thrown. :type storage_endpoints:", "None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of EndpointHealthData objects with", "): super(CertificateProperties, self).__init__(**kwargs) self.subject = None self.expiry = None self.thumbprint", "self.value = kwargs.get('value', None) self.next_link = None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public", "to check. 
:type name: str \"\"\" _validation = { 'name':", "location: The name of the Azure region. :type location: str", "JSON-serialized array of Certificate objects. :param value: The array of", "'type': '{str}'}, 'system_properties': {'key': 'systemProperties', 'type': '{str}'}, } def __init__(", "cloud-to-device messaging properties. :type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT hub", "current_value: The current value for the quota metric. :vartype current_value:", "container URI. :type input_blob_container_uri: str :param output_blob_container_uri: Required. The output", ":param sku: Required. IotHub SKU info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\"", "this endpoint. Possible values include: \"unknown\", \"healthy\", \"unhealthy\", \"dead\". :type", "The job identifier of the parent job, if any. :vartype", "_validation = { 'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items':", "either primary or secondary. The primary region is where the", "'etag', 'type': 'str'}, } def __init__( self, **kwargs ): super(EventHubConsumerGroupInfo,", "file upload. All required parameters must be populated in order", "ignored when sending a request. :ivar name_available: The value which", "{'key': 'value', 'type': '[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "'[Operation]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "'value': {'key': 'value', 'type': '[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit',", "rule is to be applied to, such as DeviceMessages. Possible", "endpoint. :vartype endpoint: str \"\"\" _validation = { 'partition_ids': {'readonly':", "related to an event hub endpoint. 
All required parameters must", "primary region is where the IoT hub is currently provisioned.", "= kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None) self.encoding = kwargs.get('encoding',", "'host_name': {'key': 'hostName', 'type': 'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'},", "in order to send to Azure. :ivar resource_type: The type", "tags: dict[str, str] \"\"\" _attribute_map = { 'tags': {'key': 'tags',", "self).__init__(**kwargs) self.code = None self.http_status_code = None self.message = None", ":ivar next_link: URL to get the next set of operation", "'str'}, 'file_name_format': {'key': 'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type':", "properties: Properties of routes that matched. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\"", "only include alphanumeric characters, periods, underscores, hyphens, has a maximum", "{'key': 'routing', 'type': 'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints':", "{'key': 'message', 'type': 'RoutingMessage'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, }", "None) self.system_properties = kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model): \"\"\"The routing related", "**kwargs ): super(CertificateListDescription, self).__init__(**kwargs) self.value = kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model):", "'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "'endpoint_id': {'key': 'endpointId', 'type': 'str'}, 'health_status': {'key': 'healthStatus', 'type': 'str'},", ":ivar type: The type of the job. 
Possible values include:", "~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The list of user-provided enrichments that the", "= kwargs.get('max_chunk_size_in_bytes', None) self.encoding = kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin", "messaging properties. :param max_delivery_count: The max delivery count for cloud-to-device", "int \"\"\" _attribute_map = { 'line': {'key': 'line', 'type': 'int'},", ":vartype id: str :ivar name: The Event Hub-compatible consumer group", "'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type': 'str'}, } def __init__( self,", "in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days: long :param partition_count: The", "and will be ignored when sending a request. :param authorization_policies:", "cloud-to-device messages. :param lock_duration_as_iso8601: The lock duration for the feedback", "maximum of 100 routing rules are allowed for paid hubs", "etag: str \"\"\" _validation = { 'id': {'readonly': True}, 'name':", "'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True}, 'sku': {'required':", "_validation = { 'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count':", "for the IoT hub. Possible values include: \"None\", \"DeviceManagement\". 
:type", "'exclude_keys': {'key': 'excludeKeys', 'type': 'bool'}, } def __init__( self, **kwargs", "'str'}, 'is_verified': {'key': 'isVerified', 'type': 'bool'}, 'created': {'key': 'created', 'type':", "'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes': {'key':", "class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of JobResponse objects with a", "the service bus topic endpoint. :type resource_group: str \"\"\" _validation", "where the route error happened. :type line: int :param column:", "str \"\"\" _validation = { 'code': {'readonly': True}, 'http_status_code': {'readonly':", "response body, it must also be provided as a header", "{ 'provisioning_state': {'readonly': True}, 'state': {'readonly': True}, 'host_name': {'readonly': True},", "{'key': 'minimum', 'type': 'long'}, 'maximum': {'key': 'maximum', 'type': 'long'}, 'default':", "~datetime.timedelta :param feedback: The properties of the feedback queue for", "The list of endpoints to which messages that satisfy the", "\"failed\", \"cancelled\". :vartype status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If", "source. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type", "\"\"\"Use to provide parameters when requesting an export of all", "= None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables are only populated", "self).__init__(**kwargs) self.body = kwargs.get('body', None) self.app_properties = kwargs.get('app_properties', None) self.system_properties", "'value', 'type': '[JobResponse]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "EventHubConsumerGroupInfo object. 
Variables are only populated by the server, and", ":type capacity: long \"\"\" _validation = { 'name': {'required': True},", "behavior and will be lost if the code is regenerated.", "route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map = { 'result': {'key':", "is evaluated to apply the routing rule. If no condition", ":param ip_filter_rules: The IP filter rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar", "be populated in order to send to Azure. :param connection_string:", "by default. For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param", "\"\"\" _validation = { 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source':", "True}, 'created': {'readonly': True}, 'updated': {'readonly': True}, } _attribute_map =", "\"\"\"Use to provide parameters when requesting an import of all", "_attribute_map = { 'value': {'key': 'value', 'type': '[CertificateDescription]'}, } def", "'thumbprint', 'type': 'str'}, 'is_verified': {'key': 'isVerified', 'type': 'bool'}, 'created': {'key':", "IoT Hub metrics to identify errors and monitor issues with", "the job. :vartype status_message: str :ivar parent_job_id: The job identifier", "str :param tags: A set of tags. The resource tags.", ":param ip_mask: Required. A string that contains the IP address", "tags. :type tags: dict[str, str] \"\"\" _attribute_map = { 'tags':", "the service bus queue endpoint. :type resource_group: str \"\"\" _validation", "of 64 characters. The following names are reserved: events, fileNotifications,", "None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing one route. :param result:", "to storage. Value should be between 60 and 720 seconds.", "} _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'current_value':", "The Etag field is *not* required. 
If it is provided", "'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys': {'key': 'excludeKeys', 'type': 'bool'},", "storage_endpoints: The list of Azure Storage endpoints where you can", "send to Azure. :param filter_name: Required. The name of the", "'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback': {'key': 'feedback', 'type':", "limit on IotHub type. :type limit: int :param name: IotHub", "None) self.storage_containers = kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related", "\"Avro\", \"AvroDeflate\", \"JSON\". :type encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation", "Tags for a resource, allowing the user to update the", "message. :vartype message: str :ivar details: The error details. :vartype", "rules are allowed for free hubs. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param", "{'required': True}, 'sku': {'required': True}, } _attribute_map = { 'id':", "'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, 'is_enabled': {'key': 'isEnabled', 'type': 'bool'},", "quota response. :param id: IotHub type id. :type id: str", ":param tags: A set of tags. The resource tags. :type", "'compilation_errors': {'key': 'compilationErrors', 'type': '[RouteCompilationError]'}, } def __init__( self, **kwargs", "https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity: long \"\"\" _validation = { 'name': {'required':", "\"AvroDeflate\", \"JSON\". :type encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation =", "device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. 
:type default_ttl_as_iso8601: ~datetime.timedelta :param feedback:", "r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True}, 'sku': {'required': True},", "is events. This key has to be present in the", "array of Certificate objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map =", "to which messages that satisfy the condition are routed. Currently", "group of the storage account. :type resource_group: str :param container_name:", "across endpoint types. The name need not be the same", ":param desired: Twin desired properties. :type desired: object :param reported:", "None) self.id = None self.name = None self.type = None", "devices in the identity registry. :vartype disabled_device_count: long \"\"\" _validation", "following meanings. The 'healthy' status shows that the endpoint is", "None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The", "def __init__( self, **kwargs ): super(OperationInputs, self).__init__(**kwargs) self.name = kwargs['name']", "__init__( self, **kwargs ): super(JobResponse, self).__init__(**kwargs) self.job_id = None self.start_time_utc", "'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__( self,", "None self.expiry = None self.thumbprint = None self.is_verified = None", "names are reserved: events, fileNotifications, $default. Endpoint names must be", "kwargs.get('limit', None) self.name = kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array", "None self.name = None self.etag = None self.type = None", "RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related to a storage container endpoint. All", "endpoint. 
All required parameters must be populated in order to", "str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation = { 'minimum': {'readonly': True,", "= kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of", "{'key': 'updated', 'type': 'rfc-1123'}, 'verification_code': {'key': 'verificationCode', 'type': 'str'}, 'certificate':", "Start where the route error happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param", "name: str :param display: The object that represents the operation.", "super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource = None self.operation =", "alphanumeric characters, periods, underscores, hyphens and has a maximum length", "The list of endpoints to which the messages that satisfy", "just .pem file content. :type certificate: str \"\"\" _attribute_map =", "'maximum': {'key': 'maximum', 'type': 'long'}, 'default': {'key': 'default', 'type': 'long'},", "to send to Azure. :param input_blob_container_uri: Required. The input blob", "limit: int :param name: IotHub type. :type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\"", "for the IoT hub. Possible values include: \"Free\", \"Standard\", \"Basic\".", "{'key': 'message', 'type': 'RoutingMessage'}, 'route': {'key': 'route', 'type': 'RouteProperties'}, 'twin':", "Value should be between 10485760(10MB) and 524288000(500MB). 
Default value is", "default: long :ivar scale_type: The type of the scaling enabled.", "def __init__( self, **kwargs ): super(JobResponseListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "**kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri']", "_validation = { 'resource_type': {'readonly': True}, 'sku': {'required': True}, 'capacity':", "{'required': True}, 'container_name': {'required': True}, } _attribute_map = { 'sas_ttl_as_iso8601':", "of the service bus queue endpoint. :type resource_group: str \"\"\"", ":type filter_name: str :param action: Required. The desired action for", "{'readonly': True}, 'type': {'readonly': True}, 'status': {'readonly': True}, 'failure_reason': {'readonly':", "next link. :vartype next_link: str \"\"\" _validation = { 'next_link':", "'current_value': {'key': 'currentValue', 'type': 'long'}, 'max_value': {'key': 'maxValue', 'type': 'long'},", "event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The routing related properties of", "{ 'name': {'required': True}, } _attribute_map = { 'name': {'key':", "'resource_group': {'key': 'resourceGroup', 'type': 'str'}, } def __init__( self, **kwargs", "results. :vartype next_link: str \"\"\" _validation = { 'next_link': {'readonly':", "self.comments = kwargs.get('comments', None) self.features = kwargs.get('features', None) self.locations =", "'bool'}, } def __init__( self, **kwargs ): super(RouteProperties, self).__init__(**kwargs) self.name", "unique. :type name: str :param source: Required. The source to", "field is *not* required. 
If it is provided in the", "'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing': {'key': 'routing', 'type': 'RoutingProperties'}, 'storage_endpoints': {'key':", "__init__( self, **kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "{'key': 'capacity', 'type': 'IotHubCapacity'}, } def __init__( self, **kwargs ):", "satisfy the condition are routed. Currently only one endpoint is", "sending a request. :param value: The array of JobResponse objects.", "IoT hub. :type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The routing", "None) class RoutingProperties(msrest.serialization.Model): \"\"\"The routing related properties of the IoT", "\"\"\"The properties of an IoT hub shared access policy. All", "of consumer groups objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The", "'str'}, 'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type':", "queue endpoints that IoT hub routes the messages to, based", "a fall-back route when none of the conditions specified in", "names must be unique across endpoint types. The name need", "type: The resource type. :vartype type: str :param location: Required.", "= { 'tags': {'key': 'tags', 'type': 'object'}, 'properties': {'key': 'properties',", "upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string: Required. The connection string", "of testing one route. :param result: Result of testing route.", "\"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". 
:type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The", "} _attribute_map = { 'job_id': {'key': 'jobId', 'type': 'str'}, 'start_time_utc':", "include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead,", "'default': {'key': 'default', 'type': 'long'}, 'scale_type': {'key': 'scaleType', 'type': 'str'},", "\"unknown\", \"healthy\", \"unhealthy\", \"dead\". :type health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\"", "sending a request. :ivar provider: Service provider: Microsoft Devices. :vartype", "= kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use to provide failover region when", "'minimum': 10485760}, } _attribute_map = { 'connection_string': {'key': 'connectionString', 'type':", "ignored when sending a request. :param authorization_policies: The shared access", "error happened. :type column: int \"\"\" _attribute_map = { 'line':", "rule that your IoT hub uses to route messages to", "name. :vartype name: str :ivar type: The resource type. :vartype", "of the parent job, if any. :vartype parent_job_id: str \"\"\"", "order to send to Azure. :param input_blob_container_uri: Required. The input", "= None self.message = None self.details = None class EventHubConsumerGroupInfo(msrest.serialization.Model):", "{'readonly': True}, 'sku': {'required': True}, 'capacity': {'required': True}, } _attribute_map", "type: str :param unit: Unit of IotHub type. :type unit:", "License. See License.txt in the project root for license information.", ":type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The resource identifier. 
:vartype id:", "'type': '[str]'}, } def __init__( self, **kwargs ): super(EnrichmentProperties, self).__init__(**kwargs)", ":type retention_time_in_days: long :param partition_count: The number of partitions for", "{ 'retention_time_in_days': {'key': 'retentionTimeInDays', 'type': 'long'}, 'partition_count': {'key': 'partitionCount', 'type':", "list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The IP filter rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule]", "for cloud-to-device messages. :type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation = {", ":ivar name: The name of the quota metric. :vartype name:", "'rights', 'type': 'str'}, } def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRule,", "location: Required. The resource location. :type location: str :param tags:", "lock_duration_as_iso8601: The lock duration for the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages.", "str \"\"\" _attribute_map = { 'certificate': {'key': 'certificate', 'type': 'str'},", "'routes': {'key': 'routes', 'type': '[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'},", "'system_properties': {'key': 'systemProperties', 'type': '{str}'}, } def __init__( self, **kwargs", "def __init__( self, **kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject = None", "self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties", "to built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties]", "indicating whether a given IoT hub name is available. 
Variables", "FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of the feedback queue for cloud-to-device messages.", "Proof-Of-Possession flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The resource identifier.", "action | delete}. :vartype name: str :param display: The object", "'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, } def __init__(", "self, **kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.type", "next_link: URL to get the next set of operation list", "status shows that the endpoint is not accepting messages as", "The resource name. :vartype name: str :ivar type: The resource", "parent job, if any. :vartype parent_job_id: str \"\"\" _validation =", "{'readonly': True}, } _attribute_map = { 'name_available': {'key': 'nameAvailable', 'type':", "{ 'value': {'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink', 'type':", "upload files. Currently you can configure only one Azure Storage", "{ 'job_id': {'key': 'jobId', 'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type':", "None) self.next_link = None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of one", "\"\"\"The properties indicating whether a given IoT hub name is", "**kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model):", ":param condition: The condition that is evaluated to apply the", "be delivered to built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid. 
:type", "{'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True},", ":param features: The capabilities and features enabled for the IoT", "when none of the conditions specified in the 'routes' section", "{'readonly': True}, } _attribute_map = { 'value': {'key': 'value', 'type':", "= kwargs.get('retention_time_in_days', None) self.partition_count = kwargs.get('partition_count', None) self.partition_ids = None", "= kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing all routes.", "__init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.primary_key", "'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateDescription, self).__init__(**kwargs)", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class EnrichmentProperties(msrest.serialization.Model):", "lock duration for the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601:", "\"\"\"The JSON-serialized array of IotHubDescription objects with a next link.", "use to secure a connection to the IoT hub. :type", "DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead,", "\"B2\", \"B3\". :type name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The", "the custom endpoints to which your IoT hub routes messages", "_attribute_map = { 'compilation_errors': {'key': 'compilationErrors', 'type': '[RouteCompilationError]'}, } def", ":ivar etag: The etag. 
:vartype etag: str \"\"\" _validation =", "\"\"\" _validation = { 'input_blob_container_uri': {'required': True}, 'output_blob_container_uri': {'required': True},", "**kwargs ): super(CertificateDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id =", "list of Azure Storage endpoints where you can upload files.", "= kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service", "\"enqueued\", \"running\", \"completed\", \"failed\", \"cancelled\". :vartype status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus", "to identify errors and monitor issues with endpoints. The 'unknown'", "self, **kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors', None) class", "the built-in Event Hubs endpoint. :type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers:", "= None self.certificate = None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf", "that your IoT hub applies to messages delivered to endpoints.", ":type failover_region: str \"\"\" _validation = { 'failover_region': {'required': True},", "locations: Primary and secondary location for iot hub. :vartype locations:", "= kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The", "of time for which the SAS URI generated by IoT", "kwargs.get('endpoint_id', None) self.health_status = kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized", "to true by default. For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. 
:type condition:", "'reason', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, } def", "The name can only include alphanumeric characters, periods, underscores, hyphens", "\"\"\" _validation = { 'code': {'readonly': True}, 'http_status_code': {'readonly': True},", "'maxDeliveryCount', 'type': 'int'}, } def __init__( self, **kwargs ): super(FeedbackProperties,", "super(RoutingTwin, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None)", "def __init__( self, **kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601',", "} _attribute_map = { 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'name':", "a request. :param value: The array of quota metrics objects.", "): super(RouteErrorRange, self).__init__(**kwargs) self.start = kwargs.get('start', None) self.end = kwargs.get('end',", "The primary region is where the IoT hub is currently", "kwargs['action'] self.ip_mask = kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The properties of the", "the fallback route. IoT Hub uses these properties when it", "name is available. Variables are only populated by the server,", "class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of the fallback route. IoT Hub", "Required. The key or name for the enrichment property. :type", "thumbprint: str :ivar is_verified: Determines whether certificate has been verified.", "True}, } _attribute_map = { 'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'},", "'type': '[str]'}, 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, } def __init__(", "partition_count: int :ivar partition_ids: The partition ids in the Event", "Required. IotHub capacity. 
:type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation = {", "str :param display: The object that represents the operation. :type", "{'key': 'tier', 'type': 'str'}, 'capacity': {'key': 'capacity', 'type': 'long'}, }", "properties. :type app_properties: dict[str, str] :param system_properties: System properties. :type", ":type max_delivery_count: int :param default_ttl_as_iso8601: The default time to live", "evaluating route. :param message: Route error message. :type message: str", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count: int \"\"\" _validation = { 'max_delivery_count':", "{'key': 'sku', 'type': 'IotHubSkuInfo'}, } def __init__( self, **kwargs ):", "\"\"\" _validation = { 'key_name': {'required': True}, 'rights': {'required': True},", "kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus", "_attribute_map = { 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601': {'key':", "service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The list of Event Hubs endpoints", "{'key': 'maximum', 'type': 'long'}, 'default': {'key': 'default', 'type': 'long'}, 'scale_type':", "{'key': 'locations', 'type': '[IotHubLocationDescription]'}, } def __init__( self, **kwargs ):", "-------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The", "_validation = { 'key_name': {'required': True}, 'rights': {'required': True}, }", "class EndpointHealthData(msrest.serialization.Model): \"\"\"The health data for an endpoint. 
:param endpoint_id:", "'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'},", "Required. The name of the shared access policy. :type key_name:", "'reason': {'key': 'reason', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'},", "features: The capabilities and features enabled for the IoT hub.", "\"\"\"The common properties of an Azure resource. Variables are only", "an optional parameter. :param tags: A set of tags. Twin", "self, **kwargs ): super(RoutingTwin, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.properties", "in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. :type max_delivery_count: int", "kwargs.get('default_ttl_as_iso8601', None) self.feedback = kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The health", "or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The status of the job. Possible", "Hub has established an eventually consistent state of health. The", "causes an error to be thrown. Not specifying a value", "\"\"\"Json-serialized array of User subscription quota response. Variables are only", "kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of EndpointHealthData objects", "None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model): \"\"\"Name of Iot", "{ 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type':", "{'key': 'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'rfc-1123'}, 'type':", "'type': 'str'}, 'expiry': {'key': 'expiry', 'type': 'rfc-1123'}, 'thumbprint': {'key': 'thumbprint',", "str :ivar host_name: The name of the host. :vartype host_name:", "ErrorDetails(msrest.serialization.Model): \"\"\"Error details. 
Variables are only populated by the server,", "role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map = { 'location': {'key':", "'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IotHubProperties'},", "# -------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model):", "'code', 'type': 'str'}, 'http_status_code': {'key': 'httpStatusCode', 'type': 'str'}, 'message': {'key':", "message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required. Route properties. :type", "None) self.resource_group = kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. :param", "'properties': {'key': 'properties', 'type': 'CertificateProperties'}, 'id': {'key': 'id', 'type': 'str'},", "array of Endpoint health data. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link:", "enabled for the IoT hub. Possible values include: \"None\", \"DeviceManagement\".", "certificate: The certificate content. :vartype certificate: str \"\"\" _validation =", "RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect,", "'type': 'str'}, 'capacity': {'key': 'capacity', 'type': 'long'}, } def __init__(", "The desired action for requests captured by this rule. Possible", "localized_value: str \"\"\" _attribute_map = { 'value': {'key': 'value', 'type':", "True}, } _attribute_map = { 'code': {'key': 'code', 'type': 'str'},", "routes messages to the fallback endpoint. 
All required parameters must", "self.max_delivery_count = kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model): \"\"\"Name of Iot Hub", "= kwargs.get('limit', None) self.name = kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized", ":param routes: JSON-serialized array of matched routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute]", "= kwargs.get('cloud_to_device', None) self.comments = kwargs.get('comments', None) self.features = kwargs.get('features',", "'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key':", "ttl_as_iso8601: The period of time for which a message is", "{'key': 'role', 'type': 'str'}, } def __init__( self, **kwargs ):", "IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints: The properties related to", "a resource is provisioned. :param location: The name of the", "{ 'failover_region': {'key': 'failoverRegion', 'type': 'str'}, } def __init__( self,", "value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map = { 'value': {'key': 'value', 'type':", "MUST have its key as $default. Specifying more than one", "super(Operation, self).__init__(**kwargs) self.name = None self.display = kwargs.get('display', None) class", "self.disabled_device_count = None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when evaluating route.", "types. The name need not be the same as the", "See: https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map = { 'endpoints':", "{'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map =", "See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. 
:type capacity: long \"\"\" _validation = { 'name':", "'scaleType', 'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubCapacity,", "= { 'max_delivery_count': {'maximum': 100, 'minimum': 1}, } _attribute_map =", "minimum: long :ivar maximum: The maximum number of units. :vartype", "set of tags. Resource tags. :type tags: dict[str, str] \"\"\"", "of the host. :vartype host_name: str :param event_hub_endpoints: The Event", "evaluate to true by default. For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type", ":param endpoint_id: Id of the endpoint. :type endpoint_id: str :param", "__init__( self, **kwargs ): super(ErrorDetails, self).__init__(**kwargs) self.code = None self.http_status_code", "value: Required. The value for the enrichment property. :type value:", "not accepting messages as expected and IoT Hub is retrying", "{'readonly': True}, } _attribute_map = { 'total_device_count': {'key': 'totalDeviceCount', 'type':", ":type unit: str :param current_value: Current number of IotHub type.", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. :type max_delivery_count: int :param default_ttl_as_iso8601: The default", "values are 'avro', 'avrodeflate', and 'JSON'. Default value is 'avro'.", "or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map = { 'endpoint_id': {'key': 'endpointId', 'type':", "None self.current_value = None self.max_value = None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The", "kwargs['route'] self.twin = kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing", "Possible values include: \"unknown\", \"enqueued\", \"running\", \"completed\", \"failed\", \"cancelled\". 
:vartype", "'properties', 'type': 'IotHubProperties'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, } def", "{'key': 'source', 'type': 'str'}, 'condition': {'key': 'condition', 'type': 'str'}, 'endpoint_names':", "of units. :vartype maximum: long :ivar default: The default number", "units. :vartype minimum: long :ivar maximum: The maximum number of", "= kwargs.get('authorization_policies', None) self.ip_filter_rules = kwargs.get('ip_filter_rules', None) self.provisioning_state = None", "None self.created = None self.updated = None self.verification_code = None", "'type': 'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type': 'str'}, } def __init__(", "self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None) self.connection_string = kwargs['connection_string'] self.container_name =", "{'required': True}, 'capacity': {'required': True}, } _attribute_map = { 'resource_type':", ":vartype endpoint: str \"\"\" _validation = { 'partition_ids': {'readonly': True},", "file upload notifications are enabled. :type enable_file_upload_notifications: bool :param cloud_to_device:", "when sending a request. :param value: The array of IotHubSkuDescription.", "= kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubSkuDescription objects", "'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'certificate': {'key': 'certificate', 'type': 'str'},", "self, **kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None) self.connection_string", "{'key': 'endpointId', 'type': 'str'}, 'health_status': {'key': 'healthStatus', 'type': 'str'}, }", "name. :vartype name: str :ivar type: the resource type. 
:vartype", "RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite,", "message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required. Route", "True}, 'created': {'readonly': True}, 'updated': {'readonly': True}, 'verification_code': {'readonly': True},", "shows that the endpoint is not accepting messages as expected", "'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, } def __init__( self,", "self.name = None self.current_value = None self.max_value = None class", "A set of tags. Resource tags. :type tags: dict[str, str]", "name. :vartype subject: str :ivar expiry: The certificate's expiration date", "{'required': True}, } _attribute_map = { 'key_name': {'key': 'keyName', 'type':", "\"\"\"The description of an X509 CA Certificate. Variables are only", "will be ignored when sending a request. :ivar name: The", "'message': {'readonly': True}, 'details': {'readonly': True}, } _attribute_map = {", "identifier of the storage account. :type subscription_id: str :param resource_group:", "True}, 'value': {'required': True}, 'endpoint_names': {'required': True, 'min_items': 1}, }", "{'required': True}, 'exclude_keys': {'required': True}, } _attribute_map = { 'export_blob_container_uri':", "'connection_string': {'key': 'connectionString', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'},", "\"\"\"Twin reference input parameter. This is an optional parameter. :param", "failed over to. :type failover_region: str \"\"\" _validation = {", "= { 'name': {'key': 'name', 'type': 'str'}, 'source': {'key': 'source',", "'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications',", "operation. 
:vartype operation: str :ivar description: Description of the operation.", "True}, 'name': {'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location':", "name of the certificate. :vartype name: str :ivar etag: The", "routes messages based on the routing rules. A maximum of", "to Azure. :param connection_string: Required. The connection string of the", "The list of Event Hubs endpoints that IoT hub routes", "self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota", "identity registry. :vartype total_device_count: long :ivar enabled_device_count: The count of", "CA Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The resource identifier.", "'properties', 'type': '{str}'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key':", "be unique. :type name: str :param source: Required. The source", "\"\"\"Information about the SKU of the IoT hub. Variables are", "{'readonly': True}, 'location': {'required': True}, } _attribute_map = { 'id':", "super(IpFilterRule, self).__init__(**kwargs) self.filter_name = kwargs['filter_name'] self.action = kwargs['action'] self.ip_mask =", ":param name: The name of the route. 
The name can", "= None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of Event Hub-compatible", "): super(MatchedRoute, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The", "kwargs.get('message', None) self.severity = kwargs.get('severity', None) self.location = kwargs.get('location', None)", "self.location = kwargs.get('location', None) self.role = kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model):", ":type max_chunk_size_in_bytes: int :param encoding: Encoding that is used to", "= kwargs.get('result', None) self.details = kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed", "_attribute_map = { 'failover_region': {'key': 'failoverRegion', 'type': 'str'}, } def", "Possible values include: \"Accept\", \"Reject\". :type action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType", "super(MatchedRoute, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties", "} def __init__( self, **kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type =", "{'key': 'maxValue', 'type': 'long'}, } def __init__( self, **kwargs ):", "to be thrown. :type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The", "'str'}, } def __init__( self, **kwargs ): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id", "None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting an", "value: List of consumer groups objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar", "when sending a request. :ivar job_id: The job identifier. 
:vartype", "'CertificateProperties'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type':", "a request. :param value: The list of shared access policies.", "**kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name']", "Region the hub will be failed over to. :type failover_region:", "} _attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'http_status_code':", "endpoint is allowed across all endpoint types for free hubs.", ":param end: End where the route error happened. :type end:", "{ 'location': {'key': 'location', 'type': 'str'}, 'role': {'key': 'role', 'type':", "'IotHubProperties'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, } def __init__( self,", "request. :param value: List of consumer groups objects. :type value:", "routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map = { 'routes': {'key':", "retention time for device-to-cloud messages in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type", "= kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) self.container_name = kwargs['container_name']", "'type': 'int'}, } def __init__( self, **kwargs ): super(FeedbackProperties, self).__init__(**kwargs)", "string that contains the IP address range in CIDR notation", "of the resource group of the event hub endpoint. :type", ":param sku: Required. The type of the resource. :type sku:", "'minimum': {'readonly': True, 'maximum': 1, 'minimum': 1}, 'maximum': {'readonly': True},", "an error to be thrown. 
:type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param", "{'key': 'description', 'type': 'str'}, } def __init__( self, **kwargs ):", "_attribute_map = { 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'route': {'key':", "built-in and custom endpoints. A maximum of 100 routing rules", "properties: The description of an X509 CA Certificate. :type properties:", "None) self.id = None self.name = None self.etag = None", "condition: The condition which is evaluated in order to apply", "bus queue endpoint. :type connection_string: str :param name: Required. The", "(R) AutoRest Code Generator. # Changes may cause incorrect behavior", "endpoint. :type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The list of storage", "subject: The certificate's subject name. :vartype subject: str :ivar expiry:", "'type': 'bool'}, } def __init__( self, **kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs)", "is regenerated. # -------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError import msrest.serialization", ":param is_enabled: Required. Used to specify whether the fallback route", "update the tags on an IoT Hub instance. :param tags:", "_attribute_map = { 'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri': {'key':", ":ivar name: The resource name. :vartype name: str :ivar type:", "provisioned. :param location: The name of the Azure region. :type", "self, **kwargs ): super(RoutingMessage, self).__init__(**kwargs) self.body = kwargs.get('body', None) self.app_properties", "ids in the Event Hub-compatible endpoint. :vartype partition_ids: list[str] :ivar", "str :ivar resource: Resource Type: IotHubs. 
:vartype resource: str :ivar", "'features', 'type': 'str'}, 'locations': {'key': 'locations', 'type': '[IotHubLocationDescription]'}, } def", "in order to send to Azure. :param input_blob_container_uri: Required. The", "messages have been delivered to or rejected from this endpoint.", "self.role = kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating whether", "} def __init__( self, **kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 =", "value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The next link. :vartype next_link: str", "**kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "for file upload. All required parameters must be populated in", "kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only", "{'readonly': True}, 'next_link': {'readonly': True}, } _attribute_map = { 'value':", "= None self.type = None class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array", "the IoT hub attempts to deliver a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload.", "\"cancelled\". :vartype status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If status", "the routing rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map = {", "a request. All required parameters must be populated in order", ":vartype details: str \"\"\" _validation = { 'code': {'readonly': True},", "this dictionary is events. This key has to be present", "{'readonly': True}, 'created': {'readonly': True}, 'updated': {'readonly': True}, } _attribute_map", "failover_region: Required. 
Region the hub will be failed over to.", ":ivar name: The name of the certificate. :vartype name: str", "_attribute_map = { 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601': {'key':", "'type': 'str'}, 'locations': {'key': 'locations', 'type': '[IotHubLocationDescription]'}, } def __init__(", ":type rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation = { 'key_name':", ".cer file or just .pem file content. :type certificate: str", "list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map = { 'endpoints': {'key': 'endpoints', 'type': 'RoutingEndpoints'},", "kwargs.get('app_properties', None) self.system_properties = kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model): \"\"\"The routing", ":param output_blob_container_uri: Required. The output blob container URI. :type output_blob_container_uri:", "= { 'routing_source': {'key': 'routingSource', 'type': 'str'}, 'message': {'key': 'message',", "The Event Hub-compatible name. :vartype path: str :ivar endpoint: The", "in the Event Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count:", "{'readonly': True}, 'default': {'readonly': True}, 'scale_type': {'readonly': True}, } _attribute_map", "Event Hub-compatible endpoint properties. 
The only possible keys to this", "'filterName', 'type': 'str'}, 'action': {'key': 'action', 'type': 'str'}, 'ip_mask': {'key':", "'resourceGroup', 'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingEventHubProperties,", "'type': 'str'}, } def __init__( self, **kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs)", "'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'},", "self.value = kwargs['value'] self.endpoint_names = kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error details.", "routing rule that your IoT hub uses to route messages", "'name': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name',", ":ivar enabled_device_count: The count of enabled devices in the identity", "~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If status == failed, this string containing", "\"\"\"The description of the IoT hub. Variables are only populated", "'endpoints': {'key': 'endpoints', 'type': 'RoutingEndpoints'}, 'routes': {'key': 'routes', 'type': '[RouteProperties]'},", "{ 'name': {'key': 'name', 'type': 'str'}, 'tier': {'key': 'tier', 'type':", "the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count: int \"\"\"", "'ip_mask': {'required': True}, } _attribute_map = { 'filter_name': {'key': 'filterName',", "link to get the next set of results. Variables are", "policy. Possible values include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\",", ":ivar status: The status of the job. Possible values include:", "in the identity registry. 
:vartype enabled_device_count: long :ivar disabled_device_count: The", "'type': 'RouteErrorRange'}, } def __init__( self, **kwargs ): super(RouteCompilationError, self).__init__(**kwargs)", "EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of the EventHubConsumerGroupInfo object. Variables are only", ":param properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map = { 'tags':", ":param capacity: Required. IotHub capacity. :type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation", "= { 'filter_name': {'required': True}, 'action': {'required': True}, 'ip_mask': {'required':", "~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation = { 'max_delivery_count': {'maximum': 100, 'minimum': 1},", "'[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState', 'type':", "required parameters must be populated in order to send to", "enable_file_upload_notifications: If True, file upload notifications are enabled. :type enable_file_upload_notifications:", "kwargs['location'] self.tags = kwargs.get('tags', None) class IotHubDescription(Resource): \"\"\"The description of", "\"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\",", ":ivar expiry: The certificate's expiration date and time. :vartype expiry:", "= None self.message = kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model): \"\"\"The properties", "'type': 'long'}, 'partition_count': {'key': 'partitionCount', 'type': 'int'}, 'partition_ids': {'key': 'partitionIds',", "to the IoT hub. 
:type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The", "'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type':", "ignored when sending a request. :param value: List of consumer", "during export. :type exclude_keys: bool \"\"\" _validation = { 'export_blob_container_uri':", "JobResponse(msrest.serialization.Model): \"\"\"The properties of the Job Response object. Variables are", "maximum of 5 routing rules are allowed for free hubs.", "'type': 'str'}, 'file_name_format': {'key': 'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds',", "300 seconds. :type batch_frequency_in_seconds: int :param max_chunk_size_in_bytes: Maximum number of", "str :ivar type: The resource type. :vartype type: str :param", "{ 'id': {'readonly': True}, 'name': {'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type':", "a request. :ivar total_device_count: The total count of devices in", "kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result of the request to list IoT", "None self.message = None self.details = None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The", ":param unit: Unit of IotHub type. :type unit: str :param", ":vartype http_status_code: str :ivar message: The error message. :vartype message:", "max_delivery_count: The number of times the IoT hub attempts to", "__init__( self, **kwargs ): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies', None)", "the endpoint is not accepting messages, after IoT Hub retried", "kwargs.get('current_value', None) self.limit = kwargs.get('limit', None) self.name = kwargs.get('name', None)", ":param health_status: Health statuses have following meanings. 
The 'healthy' status", "True}, 'endpoint_names': {'required': True, 'max_items': 1, 'min_items': 1}, 'is_enabled': {'required':", "for free hubs. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The properties", "'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map = { 'connection_string':", "{'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'}, }", "'location', 'type': 'str'}, 'role': {'key': 'role', 'type': 'str'}, } def", "to be delivered to built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid.", "value: str :param endpoint_names: Required. The list of endpoints for", ":type value: str :param endpoint_names: Required. The list of endpoints", ":param display: The object that represents the operation. :type display:", "kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use to provide failover", "provider: str :ivar resource: Resource Type: IotHubs. :vartype resource: str", "} def __init__( self, **kwargs ): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 =", "kwargs.get('value', None) self.next_link = None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables", ":vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation = { 'provisioning_state': {'readonly': True},", "for cloud-to-device messages. :param lock_duration_as_iso8601: The lock duration for the", "'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items': 1,", "that the IoT hub applies to messages to be delivered", "connection_string: Required. 
The connection string of the service bus topic", "super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues', None) self.service_bus_topics = kwargs.get('service_bus_topics', None)", "rejected from this endpoint. Possible values include: \"unknown\", \"healthy\", \"unhealthy\",", "tags: object :param properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map =", "container_name: Required. The name of storage container in the storage", "routes. :param routes: JSON-serialized array of matched routes. :type routes:", "Event Hub-compatible endpoint. :vartype partition_ids: list[str] :ivar path: The Event", "filter rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The provisioning state.", "'start': {'key': 'start', 'type': 'RouteErrorPosition'}, 'end': {'key': 'end', 'type': 'RouteErrorPosition'},", ":param properties: The description of an X509 CA Certificate. :type", "Required. The desired action for requests captured by this rule.", "for the quota metric. :vartype current_value: long :ivar max_value: The", "enrichments that the IoT hub applies to messages to be", "OperationListResult(msrest.serialization.Model): \"\"\"Result of the request to list IoT Hub operations.", "the service bus topic endpoint. :type subscription_id: str :param resource_group:", "None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating whether a given IoT", "_attribute_map = { 'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key':", "{'key': 'expiry', 'type': 'rfc-1123'}, 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, 'is_verified':", "class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. 
Variables are only populated by", "_attribute_map = { 'key_name': {'key': 'keyName', 'type': 'str'}, 'primary_key': {'key':", "'certificate', 'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateVerificationDescription,", "name of the route. The name can only include alphanumeric", "should be between 60 and 720 seconds. Default value is", "= { 'route': {'required': True}, } _attribute_map = { 'message':", "interval at which blobs are written to storage. Value should", "events, fileNotifications, $default. Endpoint names must be unique across endpoint", "health data for an endpoint. :param endpoint_id: Id of the", "'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum': 10485760}, } _attribute_map =", "~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The status of the job. Possible values", "holding only the Tags for a resource, allowing the user", "characters, periods, underscores, hyphens, has a maximum length of 64", "_validation = { 'max_delivery_count': {'maximum': 100, 'minimum': 1}, } _attribute_map", "not include the built-in Event Hubs endpoint. :type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties]", "status shows that the IoT Hub has not established a", "lock_duration_as_iso8601: The lock duration. See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. 
:type lock_duration_as_iso8601: ~datetime.timedelta", "{'key': 'desired', 'type': 'object'}, 'reported': {'key': 'reported', 'type': 'object'}, }", "): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id', None) self.health_status = kwargs.get('health_status',", "def __init__( self, **kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "of 100 routing rules are allowed for paid hubs and", "'properties': {'key': 'properties', 'type': 'RoutingTwinProperties'}, } def __init__( self, **kwargs", "\"\"\"The properties of the feedback queue for cloud-to-device messages. :param", "'type': {'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'},", "the next set of results. Variables are only populated by", "request. :ivar subject: The certificate's subject name. :vartype subject: str", "'[EndpointHealthData]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "\"\"\" _attribute_map = { 'result': {'key': 'result', 'type': 'str'}, 'details':", "int :param limit: Numerical limit on IotHub type. :type limit:", "True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True}, 'batch_frequency_in_seconds': {'maximum': 720, 'minimum':", "set to True, causes an error to be thrown. :type", "None) self.name = kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of", "class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubQuotaMetricInfo objects with a", "'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IotHubProperties'}, 'sku': {'key': 'sku',", "route. :param compilation_errors: JSON-serialized list of route compilation errors. :type", "region when requesting manual Failover for a hub. All required", "must be unique across endpoint types. The name need not", "the IP filter rule. 
:type filter_name: str :param action: Required.", "sending a request. :param authorization_policies: The shared access policies you", "True}, } _attribute_map = { 'filter_name': {'key': 'filterName', 'type': 'str'},", "{'key': 'appProperties', 'type': '{str}'}, 'system_properties': {'key': 'systemProperties', 'type': '{str}'}, }", "state of health. The 'dead' status shows that the endpoint", "across all endpoint types for paid hubs and only 1", "} _attribute_map = { 'subject': {'key': 'subject', 'type': 'str'}, 'expiry':", "be between 10485760(10MB) and 524288000(500MB). Default value is 314572800(300MB). :type", "\"\"\"Name of Iot Hub type. :param value: IotHub type. :type", "the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601:", "IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating whether a given IoT hub name", "\"\"\"The JSON-serialized array of Event Hub-compatible consumer group names with", "messages in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days: long :param partition_count:", "send to Azure. :param name: Required. The name of the", "endpoint_names: list[str] :param is_enabled: Required. Used to specify whether a", "routing rule is to be applied to. For example, DeviceMessages.", "date and time. :vartype updated: ~datetime.datetime :ivar verification_code: The certificate's", "the quota metric. :vartype name: str :ivar current_value: The current", "~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition that is evaluated to apply", "kwargs.get('localized_value', None) class Operation(msrest.serialization.Model): \"\"\"IoT Hub REST API operation. Variables", "of the service bus queue endpoint. 
:type connection_string: str :param", "{'key': 'sku', 'type': 'IotHubSkuInfo'}, 'capacity': {'key': 'capacity', 'type': 'IotHubCapacity'}, }", "'code': {'key': 'code', 'type': 'str'}, 'http_status_code': {'key': 'httpStatusCode', 'type': 'str'},", "of User subscription quota response. Variables are only populated by", "column: Column where the route error happened. :type column: int", "_validation = { 'name': {'readonly': True}, } _attribute_map = {", "kwargs['filter_name'] self.action = kwargs['action'] self.ip_mask = kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The", "__init__( self, **kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name", "'endpointNames', 'type': '[str]'}, 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, } def", "kwargs.get('routing', None) self.storage_endpoints = kwargs.get('storage_endpoints', None) self.messaging_endpoints = kwargs.get('messaging_endpoints', None)", "Operation name: {provider}/{resource}/{read | write | action | delete}. :vartype", "'path', 'type': 'str'}, 'endpoint': {'key': 'endpoint', 'type': 'str'}, } def", "self).__init__(**kwargs) self.id = None self.name = None self.type = None", "class CertificateProperties(msrest.serialization.Model): \"\"\"The description of an X509 CA Certificate. Variables", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of User subscription quota response. 
Variables", "'endpoint_names': {'required': True, 'min_items': 1}, } _attribute_map = { 'key':", "'value', 'type': '[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "'type': '[RoutingStorageContainerProperties]'}, } def __init__( self, **kwargs ): super(RoutingEndpoints, self).__init__(**kwargs)", "requesting an export of all devices in the IoT hub.", "~datetime.timedelta :param max_delivery_count: The number of times the IoT hub", "be unique across endpoint types. The name need not be", "JSON-serialized X509 Certificate. :param certificate: base-64 representation of the X509", "): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "The Event Hub-compatible endpoint properties. The only possible keys to", "def __init__( self, **kwargs ): super(Name, self).__init__(**kwargs) self.value = kwargs.get('value',", "'service_bus_queues': {'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'},", "access policy. :type key_name: str :param primary_key: The primary key.", "IotHubs. :vartype resource: str :ivar operation: Name of the operation.", "of the certificate. :vartype name: str :ivar etag: The entity", "include: \"Free\", \"Standard\", \"Basic\". :vartype tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param", "The number of times the IoT hub attempts to deliver", "{'readonly': True}, 'scale_type': {'readonly': True}, } _attribute_map = { 'minimum':", "'type': 'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, } def __init__(", "compilation_errors: JSON-serialized list of route compilation errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError]", "id. :type id: str :param type: Response type. 
:type type:", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[IotHubQuotaMetricInfo]'},", "{iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory but can be reordered. :type", "**kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes = kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model):", "The name of the resource group of the service bus", "_attribute_map = { 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, 'reason': {'key':", "'rfc-1123'}, 'type': {'key': 'type', 'type': 'str'}, 'status': {'key': 'status', 'type':", ":ivar partition_ids: The partition ids in the Event Hub-compatible endpoint.", "{'key': 'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, }", "{'key': 'features', 'type': 'str'}, 'locations': {'key': 'locations', 'type': '[IotHubLocationDescription]'}, }", "__init__( self, **kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject = None self.expiry", "{'readonly': True}, 'message': {'readonly': True}, 'details': {'readonly': True}, } _attribute_map", "'message': {'key': 'message', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'},", "action for requests captured by this rule. Possible values include:", "'routing', 'type': 'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key':", "property when the enableFileUploadNotifications property is set to True, causes", "and will be ignored when sending a request. All required", "value: The array of IotHubDescription objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar", "~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The messaging endpoint properties for the file", "objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The next link. :vartype", "'{EventHubProperties}'}, 'routing': {'key': 'routing', 'type': 'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints', 'type':", "a request. :param value: JSON-serialized array of Endpoint health data.", "allowing the user to update the tags on an IoT", "The secondary key. :type secondary_key: str :param rights: Required. The", "a message is available to consume before it is expired", "str :param name: Required. The name that identifies this endpoint.", "are written to storage. Value should be between 60 and", "class IotHubProperties(msrest.serialization.Model): \"\"\"The properties of an IoT hub. Variables are", "populated in order to send to Azure. :param export_blob_container_uri: Required.", "endpoint properties. The only possible keys to this dictionary is", "where a resource is provisioned. :param location: The name of", "kwargs.get('message', None) self.route = kwargs['route'] self.twin = kwargs.get('twin', None) class", "see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names: Required. The list", ":type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The list of user-provided enrichments", "None) self.sku = kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "'type', 'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateDescription,", "= kwargs.get('event_hub_endpoints', None) self.routing = kwargs.get('routing', None) self.storage_endpoints = kwargs.get('storage_endpoints',", "certificate: str \"\"\" _validation = { 'subject': {'readonly': True}, 'expiry':", "The resource type. 
:vartype type: str \"\"\" _validation = {", "The source to which the routing rule is to be", "str \"\"\" _validation = { 'name_available': {'readonly': True}, 'reason': {'readonly':", "the rule. :type ip_mask: str \"\"\" _validation = { 'filter_name':", "super(RoutingTwinProperties, self).__init__(**kwargs) self.desired = kwargs.get('desired', None) self.reported = kwargs.get('reported', None)", "for the IoT hub. :type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing:", "{ 'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type':", "str] \"\"\" _attribute_map = { 'body': {'key': 'body', 'type': 'str'},", "= kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related to a", "sending a request. :ivar code: The error code. :vartype code:", "The default number of units. :vartype default: long :ivar scale_type:", "message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int \"\"\" _validation = {", "range in CIDR notation for the rule. :type ip_mask: str", "'name': {'required': True}, } _attribute_map = { 'name': {'key': 'name',", ":param name: Required. The name of the IoT hub to", "{'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ):", ":type partition_count: int :ivar partition_ids: The partition ids in the", "send to Azure. :param name: The name of the route.", "None) class CertificateProperties(msrest.serialization.Model): \"\"\"The description of an X509 CA Certificate.", "with the endpoint. 
No messages have been delivered to or", "'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state': {'key':", "**kwargs ): super(CertificateProperties, self).__init__(**kwargs) self.subject = None self.expiry = None", "= { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name',", "to get the next set of operation list results if", "properties of the feedback queue for cloud-to-device messages. :param lock_duration_as_iso8601:", "'type': 'str'}, 'http_status_code': {'key': 'httpStatusCode', 'type': 'str'}, 'message': {'key': 'message',", "{ 'subject': {'key': 'subject', 'type': 'str'}, 'expiry': {'key': 'expiry', 'type':", "_attribute_map = { 'retention_time_in_days': {'key': 'retentionTimeInDays', 'type': 'long'}, 'partition_count': {'key':", ":ivar host_name: The name of the host. :vartype host_name: str", "free hubs. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The properties of", ":ivar job_id: The job identifier. :vartype job_id: str :ivar start_time_utc:", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number", "'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type': 'duration'},", "IotHubDescription objects with a next link. Variables are only populated", "statistics. Variables are only populated by the server, and will", "be ignored when sending a request. :ivar minimum: The minimum", "messages. 
:type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation = { 'max_delivery_count': {'maximum':", "failure_reason: If status == failed, this string containing the reason", "do not meet any of the conditions specified in the", "True}, 'end_time_utc': {'readonly': True}, 'type': {'readonly': True}, 'status': {'readonly': True},", "error. Possible values include: \"error\", \"warning\". :type severity: str or", "as a header per the normal ETag convention. :type etag:", "_attribute_map = { 'routes': {'key': 'routes', 'type': '[MatchedRoute]'}, } def", "'resourceGroup', 'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties,", ":type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map = { 'routes': {'key': 'routes',", "sending a request. :param properties: The description of an X509", "self.operation = None self.description = None class OperationInputs(msrest.serialization.Model): \"\"\"Input values.", "self.host_name = None self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None) self.routing = kwargs.get('routing',", "for the Azure Storage account to which files are uploaded.", "issued for the Proof-Of-Possession flow. Variables are only populated by", ":param twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation", "= kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the messaging", "hub endpoint. 
:type resource_group: str \"\"\" _validation = { 'connection_string':", "'output_blob_container_uri': {'required': True}, } _attribute_map = { 'input_blob_container_uri': {'key': 'inputBlobContainerUri',", "self).__init__(**kwargs) self.start = kwargs.get('start', None) self.end = kwargs.get('end', None) class", "): super(CertificateDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None", "\"\"\"The JSON-serialized array of JobResponse objects with a next link.", "of Certificate objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map = {", "kwargs.get('file_name_format', None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None)", "= { 'connection_string': {'required': True}, 'container_name': {'required': True}, } _attribute_map", "that your IoT hub uses to route messages to endpoints.", "rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation = { 'key_name': {'required':", "The minimum number of units. :vartype minimum: long :ivar maximum:", "__init__( self, **kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs) self.location = kwargs.get('location', None)", "_validation = { 'name': {'readonly': True}, 'current_value': {'readonly': True}, 'max_value':", "'capacity': {'key': 'capacity', 'type': 'long'}, } def __init__( self, **kwargs", "def __init__( self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string']", "kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format', None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes", "Hub-compatible consumer group names with a next link. 
Variables are", "str \"\"\" _validation = { 'input_blob_container_uri': {'required': True}, 'output_blob_container_uri': {'required':", "True}, } _attribute_map = { 'retention_time_in_days': {'key': 'retentionTimeInDays', 'type': 'long'},", "\"\"\"The properties of the fallback route. IoT Hub uses these", "routing_source: Routing source. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\",", "kwargs.get('value', None) self.next_link = None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched.", "in the IoT hub. All required parameters must be populated", ":ivar next_link: Link to more results. :vartype next_link: str \"\"\"", "super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None self.name", "None self.path = None self.endpoint = None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use", "{ 'job_id': {'readonly': True}, 'start_time_utc': {'readonly': True}, 'end_time_utc': {'readonly': True},", "'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type':", "IoT hub cloud-to-device messaging properties. :param max_delivery_count: The max delivery", "is used to serialize messages to blobs. Supported values are", "a value for this property when the enableFileUploadNotifications property is", "properties of the route that is used as a fall-back", "that represents the operation. Variables are only populated by the", "time for which a message is available to consume before", "resource tags. :type tags: dict[str, str] :param etag: The Etag", "or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation = { 'minimum': {'readonly': True, 'maximum':", "paid hubs and a maximum of 5 routing rules are", "failover to. Possible values include: \"primary\", \"secondary\". 
:type role: str", "{ 'value': {'key': 'value', 'type': '[IotHubDescription]'}, 'next_link': {'key': 'nextLink', 'type':", "send to Azure. :param key: Required. The key or name", "self, **kwargs ): super(ErrorDetails, self).__init__(**kwargs) self.code = None self.http_status_code =", "= kwargs.get('value', None) self.next_link = None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties", "subscription_id: The subscription identifier of the storage account. :type subscription_id:", "list of user-provided routing rules that the IoT hub uses", "'desired': {'key': 'desired', 'type': 'object'}, 'reported': {'key': 'reported', 'type': 'object'},", "error happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End where the", "the feedback queue for cloud-to-device messages. :param lock_duration_as_iso8601: The lock", "self.location = kwargs['location'] self.tags = kwargs.get('tags', None) class IotHubDescription(Resource): \"\"\"The", "IoT hub uses to route messages to built-in and custom", "_attribute_map = { 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string': {'key':", "DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\". :type rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights", "failover region when requesting manual Failover for a hub. All", "API operation. Variables are only populated by the server, and", "Azure. :ivar id: The resource identifier. :vartype id: str :ivar", "'RouteErrorPosition'}, } def __init__( self, **kwargs ): super(RouteErrorRange, self).__init__(**kwargs) self.start", "will be ignored when sending a request. :param value: :type", "'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, } def __init__(", "class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus queue endpoint", "\"unhealthy\", \"dead\". 
:type health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map =", "{'key': 'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback':", "\"\"\" _attribute_map = { 'body': {'key': 'body', 'type': 'str'}, 'app_properties':", "max_value: The maximum value of the quota metric. :vartype max_value:", "Routing source. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\".", "are enabled. :type enable_file_upload_notifications: bool :param cloud_to_device: The IoT hub", "been delivered to or rejected from this endpoint. Possible values", "notation for the rule. :type ip_mask: str \"\"\" _validation =", "request. :ivar total_device_count: The total count of devices in the", ":param etag: The Etag field is *not* required. If it", "not set, the messages which do not meet any of", "to send to Azure. :ivar id: The resource identifier. :vartype", "Azure. :ivar resource_type: The type of the resource. :vartype resource_type:", "or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary and secondary location for iot", "self, **kwargs ): super(Resource, self).__init__(**kwargs) self.id = None self.name =", "cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT hub comments. :type comments: str", "{ 'name': {'key': 'name', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type':", "_attribute_map = { 'message': {'key': 'message', 'type': 'str'}, 'severity': {'key':", "to the built-in eventhub endpoint. 
:type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments:", "super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model):", "self, **kwargs ): super(RoutingProperties, self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints', None) self.routes", ":ivar maximum: The maximum number of units. :vartype maximum: long", "1}, 'maximum': {'readonly': True}, 'default': {'readonly': True}, 'scale_type': {'readonly': True},", "the conditions specified in the 'routes' section are met. This", "the 'routes' section are met. This is an optional parameter.", "self, **kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source', None) self.message", "routing rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map = { 'service_bus_queues':", "= None self.type = None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub", "{'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs':", "{'readonly': True}, 'updated': {'readonly': True}, 'verification_code': {'readonly': True}, 'certificate': {'readonly':", "'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type':", "{'readonly': True}, } _attribute_map = { 'code': {'key': 'code', 'type':", "dictionary is events. This key has to be present in", "\"JSON\". :type encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation = {", "retrial period. 
See IoT Hub metrics to identify errors and", "specified in the 'routes' section get routed to the built-in", "Possible values include: \"None\", \"DeviceManagement\". :type features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities", "id: The resource identifier. :vartype id: str :ivar name: The", "__init__( self, **kwargs ): super(IpFilterRule, self).__init__(**kwargs) self.filter_name = kwargs['filter_name'] self.action", "you can use to secure a connection to the IoT", "self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT", "details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map = { 'result': {'key': 'result', 'type':", "a request. :ivar name: Operation name: {provider}/{resource}/{read | write |", "include: \"F1\", \"S1\", \"S2\", \"S3\", \"B1\", \"B2\", \"B3\". :type name:", "**kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.primary_key = kwargs.get('primary_key',", "{'key': 'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'}, }", "of user-provided routing rules that the IoT hub uses to", "'type': 'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type': 'int'}, 'encoding': {'key': 'encoding',", "value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The next link. :vartype next_link: str", "a connection with the endpoint. No messages have been delivered", "the SKU. 
Possible values include: \"F1\", \"S1\", \"S2\", \"S3\", \"B1\",", ":type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The messaging endpoint properties", "self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model):", "class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array of Certificate objects. :param value:", "~datetime.datetime :ivar thumbprint: The certificate's thumbprint. :vartype thumbprint: str :ivar", "Required. The permissions assigned to the shared access policy. Possible", "_attribute_map = { 'job_id': {'key': 'jobId', 'type': 'str'}, 'start_time_utc': {'key':", "kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) self.container_name = kwargs['container_name'] self.file_name_format", "None self.failure_reason = None self.status_message = None self.parent_job_id = None", "self, **kwargs ): super(MatchedRoute, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) class", "self.start = kwargs.get('start', None) self.end = kwargs.get('end', None) class RouteProperties(msrest.serialization.Model):", "{'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing': {'key': 'routing', 'type': 'RoutingProperties'}, 'storage_endpoints':", "of testing all routes. :param routes: JSON-serialized array of matched", "{'readonly': True}, 'max_value': {'readonly': True}, } _attribute_map = { 'name':", "on an IoT Hub instance. :param tags: A set of", "JSON-serialized array of IotHubSkuDescription objects with a next link. Variables", "of the fallback route. IoT Hub uses these properties when", "The primary key. 
:type primary_key: str :param secondary_key: The secondary", "This key has to be present in the dictionary while", "CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf certificate. :param certificate: base-64 representation of", "str :param type: Response type. :type type: str :param unit:", "be failed over to. :type failover_region: str \"\"\" _validation =", "type. :vartype type: str \"\"\" _validation = { 'id': {'readonly':", "'value': {'key': 'value', 'type': '[Operation]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "\"\"\" _validation = { 'source': {'required': True}, 'endpoint_names': {'required': True,", "self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints', None) self.routes = kwargs.get('routes', None) self.fallback_route", "applied to. For example, DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\",", "be ignored when sending a request. :ivar subject: The certificate's", "The connection string of the storage account. :type connection_string: str", "will be ignored when sending a request. :ivar value: List", "'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs)", ":param encoding: Encoding that is used to serialize messages to", "tags. :type tags: dict[str, str] \"\"\" _validation = { 'id':", "= { 'resource_type': {'key': 'resourceType', 'type': 'str'}, 'sku': {'key': 'sku',", "class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related to a storage container endpoint.", "it evaluates to true by default. For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language.", "'severity': {'key': 'severity', 'type': 'str'}, 'location': {'key': 'location', 'type': 'RouteErrorRange'},", "messages. :param lock_duration_as_iso8601: The lock duration for the feedback queue.", "the built-in eventhub endpoint. 
:type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The", "{'readonly': True}, 'reason': {'readonly': True}, } _attribute_map = { 'name_available':", "'str'}, } def __init__( self, **kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties", "\"\"\"The IP filter rules for the IoT hub. All required", ":ivar max_value: The maximum value of the quota metric. :vartype", "'failure_reason': {'readonly': True}, 'status_message': {'readonly': True}, 'parent_job_id': {'readonly': True}, }", "a request. :ivar subject: The certificate's subject name. :vartype subject:", "expected and IoT Hub is retrying to send data to", "custom endpoints are allowed across all endpoint types for paid", "etag: The entity tag. :vartype etag: str :ivar type: The", "None) self.next_link = None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the SKU", "} def __init__( self, **kwargs ): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id =", "The array of quota metrics objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar", "'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, 'description': {'key': 'description',", "properties of an IoT hub shared access policy. All required", "'resourceType', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, 'capacity': {'key':", "that contains the IP address range in CIDR notation for", "self.enrichments = kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to", "str :param current_value: Current number of IotHub type. :type current_value:", "= None self.enabled_device_count = None self.disabled_device_count = None class RouteCompilationError(msrest.serialization.Model):", "the Azure Storage endpoint for file upload. All required parameters", "notification queue. 
:type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If True,", "{'key': 'isEnabled', 'type': 'bool'}, } def __init__( self, **kwargs ):", "\"\"\"Result of testing one route. :param result: Result of testing", "primary key. :type primary_key: str :param secondary_key: The secondary key.", "CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub cloud-to-device messaging properties. :param max_delivery_count: The", "'{str}'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type':", "endpoints. The 'unknown' status shows that the IoT Hub has", "None) self.end = kwargs.get('end', None) class RouteProperties(msrest.serialization.Model): \"\"\"The properties of", "can configure only one Azure Storage account and that MUST", "self, **kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601", "routed. Currently only one endpoint is allowed. :type endpoint_names: list[str]", "source: Required. The source that the routing rule is to", "Azure Storage account to which files are uploaded. :type connection_string:", "'routes' section are met. This is an optional parameter. When", "'type': '{str}'}, } def __init__( self, **kwargs ): super(TagsResource, self).__init__(**kwargs)", "must be unique. :type name: str :param source: Required. The", "None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of the EventHubConsumerGroupInfo object. Variables", "'int'}, 'column': {'key': 'column', 'type': 'int'}, } def __init__( self,", "is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string:", ":param properties: Properties of routes that matched. 
:type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties", "{'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit':", "str :param subscription_id: The subscription identifier of the storage account.", "types for paid hubs and only 1 custom endpoint is", "properties related to the custom endpoints to which your IoT", "'source', 'type': 'str'}, 'condition': {'key': 'condition', 'type': 'str'}, 'endpoint_names': {'key':", ":type app_properties: dict[str, str] :param system_properties: System properties. :type system_properties:", ":type tags: dict[str, str] \"\"\" _validation = { 'id': {'readonly':", "kwargs['name'] self.source = kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names =", "happened. :param line: Line where the route error happened. :type", "str :param source: Required. The source to which the routing", "_attribute_map = { 'value': {'key': 'value', 'type': '[UserSubscriptionQuota]'}, 'next_link': {'key':", "kwargs.get('resource_group', None) self.container_name = kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format', None) self.batch_frequency_in_seconds", ":vartype total_device_count: long :ivar enabled_device_count: The count of enabled devices", ":vartype subject: str :ivar expiry: The certificate's expiration date and", "by this rule. Possible values include: \"Accept\", \"Reject\". :type action:", "{'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments', 'type': '[EnrichmentProperties]'}, }", ":param source: Required. The source to which the routing rule", "= kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubDescription objects", "hub. Variables are only populated by the server, and will", "format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. 
All parameters are mandatory but can be", "'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'parent_job_id': {'key': 'parentJobId', 'type':", "IotHubDescription objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The next link.", "will be ignored when sending a request. :ivar provider: Service", "'subject': {'readonly': True}, 'expiry': {'readonly': True}, 'thumbprint': {'readonly': True}, 'is_verified':", "root container where you upload files. The container need not", "self.unit = kwargs.get('unit', None) self.current_value = kwargs.get('current_value', None) self.limit =", "'value', 'type': '[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired = kwargs.get('desired', None) self.reported = kwargs.get('reported',", "Response type. :type type: str :param unit: Unit of IotHub", "'str'}, } def __init__( self, **kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(OperationListResult,", "for free hubs. :param service_bus_queues: The list of Service Bus", "**kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "'avro'. Possible values include: \"Avro\", \"AvroDeflate\", \"JSON\". :type encoding: str", "ip_mask: Required. A string that contains the IP address range", "Devices. :vartype provider: str :ivar resource: Resource Type: IotHubs. :vartype", "FailoverInput(msrest.serialization.Model): \"\"\"Use to provide failover region when requesting manual Failover", ":ivar etag: The entity tag. :vartype etag: str :ivar type:", "_attribute_map = { 'certificate': {'key': 'certificate', 'type': 'str'}, } def", "the storage account. 
:type subscription_id: str :param resource_group: The name", "The list of endpoints for which the enrichment is applied", "{'key': 'maxDeliveryCount', 'type': 'int'}, } def __init__( self, **kwargs ):", "= None self.status = None self.failure_reason = None self.status_message =", "the messages to, based on the routing rules. :type service_bus_topics:", "{ 'minimum': {'key': 'minimum', 'type': 'long'}, 'maximum': {'key': 'maximum', 'type':", "and will be ignored when sending a request. :ivar value:", "retrying to send data to this endpoint. The status of", "str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The status of the job.", "} _attribute_map = { 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'route':", "'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, 'file_name_format': {'key': 'fileNameFormat',", "Tags. :type tags: object :param properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\"", "super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link = None class RegistryStatistics(msrest.serialization.Model):", "IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubQuotaMetricInfo objects with a next", "\"\"\" _attribute_map = { 'compilation_errors': {'key': 'compilationErrors', 'type': '[RouteCompilationError]'}, }", "to this endpoint. The status of an unhealthy endpoint will", "\"Accept\", \"Reject\". :type action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required.", "self).__init__(**kwargs) self.value = kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model): \"\"\"The description of", "**kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 =", "route is enabled. 
:type is_enabled: bool \"\"\" _validation = {", "= None self.description = None class OperationInputs(msrest.serialization.Model): \"\"\"Input values. All", "_validation = { 'name': {'required': True}, } _attribute_map = {", "'resource_type': {'readonly': True}, 'sku': {'required': True}, 'capacity': {'required': True}, }", "'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties',", "topic endpoints that the IoT hub routes the messages to,", "capacity. :type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation = { 'resource_type': {'readonly':", "kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. :param body: Body of", "is applied to the message. :type endpoint_names: list[str] \"\"\" _validation", "{'key': 'containerName', 'type': 'str'}, } def __init__( self, **kwargs ):", "'type': 'str'}, } def __init__( self, **kwargs ): super(JobResponse, self).__init__(**kwargs)", "the user to update the tags on an IoT Hub", ":param connection_string: Required. The connection string of the storage account.", "certificate content. :type certificate: str \"\"\" _validation = { 'subject':", ":vartype type: str :ivar etag: The etag. :vartype etag: str", "\"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\",", "self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name =", "code is regenerated. 
# -------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError import", "self.tags = kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing all", "'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def __init__( self, **kwargs", "\"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param", "is expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type", "'message', 'type': 'str'}, 'severity': {'key': 'severity', 'type': 'str'}, 'location': {'key':", "export of all devices in the IoT hub. All required", "parameters when requesting an export of all devices in the", "when sending a request. :param value: JSON-serialized array of Endpoint", "= kwargs.get('type', None) self.unit = kwargs.get('unit', None) self.current_value = kwargs.get('current_value',", "self).__init__(**kwargs) self.name = kwargs.get('name', None) self.source = kwargs['source'] self.condition =", "class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the Azure Storage endpoint for", "\"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead,", "primary_key: str :param secondary_key: The secondary key. :type secondary_key: str", "\"\"\"The JSON-serialized array of Certificate objects. 
:param value: The array", "self.container_name = kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A container holding only the", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype next_link: str \"\"\" _validation", "True}, } _attribute_map = { 'name': {'key': 'name', 'type': 'str'},", "The array of IotHubDescription objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link:", "updated: ~datetime.datetime :param certificate: The certificate content. :type certificate: str", "'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, } def", "the Proof-Of-Possession flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The resource", "~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed result of testing route. :type details:", "optional parameter. When this property is not set, the messages", "True}, 'output_blob_container_uri': {'required': True}, } _attribute_map = { 'input_blob_container_uri': {'key':", "64 characters, and must be unique. :type name: str :param", "self, **kwargs ): super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link =", "The condition which is evaluated in order to apply the", "= kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A container holding only the Tags", ":param connection_string: Required. The connection string for the Azure Storage", "live for cloud-to-device messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-", "'id': {'readonly': True}, 'name': {'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly':", "request. :param properties: The tags. :type properties: dict[str, str] :ivar", "ignored when sending a request. 
:param value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota]", "_attribute_map = { 'subject': {'key': 'subject', 'type': 'str'}, 'expiry': {'key':", "} def __init__( self, **kwargs ): super(IpFilterRule, self).__init__(**kwargs) self.filter_name =", "} def __init__( self, **kwargs ): super(JobResponseListResult, self).__init__(**kwargs) self.value =", "hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints: The properties related to the", "'{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type':", "= { 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__(", "The Event Hub-compatible consumer group name. :vartype name: str :ivar", ":vartype minimum: long :ivar maximum: The maximum number of units.", "provided, it evaluates to true by default. For grammar, see:", "the IoT Hub has not established a connection with the", "as a fall-back route when none of the conditions specified", "True}, } _attribute_map = { 'key_name': {'key': 'keyName', 'type': 'str'},", "'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'container_name': {'key':", "= kwargs.get('ip_filter_rules', None) self.provisioning_state = None self.state = None self.host_name", "kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of the fallback route. IoT", "self.compilation_errors = kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota response.", "10485760(10MB) and 524288000(500MB). Default value is 314572800(300MB). :type max_chunk_size_in_bytes: int", "'key_name': {'required': True}, 'rights': {'required': True}, } _attribute_map = {", "'code': {'readonly': True}, 'http_status_code': {'readonly': True}, 'message': {'readonly': True}, 'details':", "Certificate. 
:param certificate: base-64 representation of the X509 leaf certificate", "to send data to this endpoint. The status of an", "_attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'role': {'key':", "the job. :vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: The time the", "'type': 'bool'}, 'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments': {'key': 'comments',", "event hub endpoint. :type resource_group: str \"\"\" _validation = {", "CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509 Certificate. :param certificate: base-64 representation of", "None) self.next_link = None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the", "when sending a request. :ivar subject: The certificate's subject name.", "rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map = { 'service_bus_queues': {'key':", "metrics properties. Variables are only populated by the server, and", "of health. The 'dead' status shows that the endpoint is", "minimum number of units. :vartype minimum: long :ivar maximum: The", "'str'}, } def __init__( self, **kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value", "'type': 'duration'}, 'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'}, } def __init__(", "or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map = { 'location': {'key': 'location', 'type':", "secure a connection to the IoT hub. :type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule]", "more results. :vartype next_link: str \"\"\" _validation = { 'next_link':", "= kwargs.get('tags', None) class IotHubDescription(Resource): \"\"\"The description of the IoT", "Hub REST API operation. 
Variables are only populated by the", "'str'}, } def __init__( self, **kwargs ): super(OperationInputs, self).__init__(**kwargs) self.name", "'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, } def", "'name', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, 'max_value': {'key':", "= { 'export_blob_container_uri': {'required': True}, 'exclude_keys': {'required': True}, } _attribute_map", "reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The detailed reason message.", "The certificate content. :type certificate: str \"\"\" _validation = {", ":ivar reason: The reason for unavailability. Possible values include: \"Invalid\",", "\"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly': True,", "'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def __init__(", ":type endpoint_names: list[str] \"\"\" _validation = { 'key': {'required': True},", "set of operation list results if there are any. :vartype", "provisioned IoT Hub units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity: long \"\"\"", "'currentValue', 'type': 'int'}, 'limit': {'key': 'limit', 'type': 'int'}, 'name': {'key':", "length of 64 characters, and must be unique. :type name:", "\"\"\"SKU properties. Variables are only populated by the server, and", "self, **kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "_attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key':", "event_hub_endpoints: The Event Hub-compatible endpoint properties. The only possible keys", "Certificate objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map = { 'value':", "} _attribute_map = { 'value': {'key': 'value', 'type': '[IotHubDescription]'}, 'next_link':", "self, **kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.primary_key =", "the event hub endpoint. :type subscription_id: str :param resource_group: The", "'name', 'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'}, 'capacity': {'key':", ":ivar end_time_utc: The time the job stopped processing. :vartype end_time_utc:", "properties: The description of an X509 CA Certificate including the", "'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'verification_code': {'key': 'verificationCode', 'type': 'str'},", "of Event Hubs endpoints that IoT hub routes messages to,", "_validation = { 'next_link': {'readonly': True}, } _attribute_map = {", "number of partitions for receiving device-to-cloud messages in the Event", "self.authorization_policies = kwargs.get('authorization_policies', None) self.ip_filter_rules = kwargs.get('ip_filter_rules', None) self.provisioning_state =", "self).__init__(**kwargs) self.message = kwargs.get('message', None) self.route = kwargs['route'] self.twin =", "Required. The source that the routing rule is to be", "kwargs.get('routes', None) self.fallback_route = kwargs.get('fallback_route', None) self.enrichments = kwargs.get('enrichments', None)", "whether keys should be excluded during export. :type exclude_keys: bool", "{'key': 'jobId', 'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc':", "'routes', 'type': '[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments': {'key':", "by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601: ~datetime.timedelta", "input blob container URI. 
:type input_blob_container_uri: str :param output_blob_container_uri: Required.", "= { 'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly':", "list of Event Hubs endpoints that IoT hub routes messages", "True}, 'ip_mask': {'required': True}, } _attribute_map = { 'filter_name': {'key':", "'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, } def __init__( self, **kwargs", "sending a request. :param value: The array of IotHubDescription objects.", "self).__init__(**kwargs) self.tags = kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing", "https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map = { 'endpoints': {'key':", "str :ivar name: The Event Hub-compatible consumer group name. :vartype", "an Azure resource. Variables are only populated by the server,", "when sending a request. :ivar minimum: The minimum number of", "of user-provided enrichments that the IoT hub applies to messages", "= None self.end_time_utc = None self.type = None self.status =", "parameters when requesting an import of all devices in the", "'type': 'IotHubSkuInfo'}, 'capacity': {'key': 'capacity', 'type': 'IotHubCapacity'}, } def __init__(", "): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source', None) self.message = kwargs.get('message',", "IoT Hub is retrying to send data to this endpoint.", "will be ignored when sending a request. :ivar code: The", "tags: A set of tags. Twin Tags. :type tags: object", "contains a list of operations and a URL link to", "certificate's last update date and time. 
:vartype updated: ~datetime.datetime :ivar", "'parentJobId', 'type': 'str'}, } def __init__( self, **kwargs ): super(JobResponse,", "self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.partition_count = kwargs.get('partition_count', None) self.partition_ids", "connection string of the storage account. :type connection_string: str :param", "= None self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None) self.routing = kwargs.get('routing', None)", "be ignored when sending a request. :ivar value: List of", ":param file_name_format: File name format for the blob. Default format", "list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map = { 'compilation_errors': {'key': 'compilationErrors', 'type': '[RouteCompilationError]'},", "= None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the Azure Storage", "properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required. IotHub SKU info. :type sku:", "kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None) self.encoding = kwargs.get('encoding', None)", "True}, } _attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificateProperties'},", "tags: dict[str, str] \"\"\" _validation = { 'id': {'readonly': True},", "properties. :type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT hub comments. :type", "'provisioning_state': {'readonly': True}, 'state': {'readonly': True}, 'host_name': {'readonly': True}, 'locations':", "metric. 
:vartype max_value: long \"\"\" _validation = { 'name': {'readonly':", "} _attribute_map = { 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601':", "**kwargs ): super(RouteErrorPosition, self).__init__(**kwargs) self.line = kwargs.get('line', None) self.column =", ":type export_blob_container_uri: str :param exclude_keys: Required. The value indicating whether", "resource_group: The name of the resource group of the service", "the hub will be failed over to. :type failover_region: str", ":param secondary_key: The secondary key. :type secondary_key: str :param rights:", "name of the IP filter rule. :type filter_name: str :param", "of the Azure Storage endpoint for file upload. All required", "class IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter rules for the IoT hub.", "= { 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string': {'key': 'connectionString',", "__init__( self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name", "= kwargs.get('message', None) self.twin = kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result", ":ivar current_value: The current value for the quota metric. :vartype", "'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__(", "endpoint. No messages have been delivered to or rejected from", "def __init__( self, **kwargs ): super(RoutingProperties, self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints',", "} def __init__( self, **kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string =", "Twin Tags. :type tags: object :param properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties", "The array of JobResponse objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link:", "'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount', 'type':", "request. :param authorization_policies: The shared access policies you can use", "is to be applied to. For example, DeviceMessages. Possible values", "'type': 'object'}, } def __init__( self, **kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs)", "kwargs.get('partition_count', None) self.partition_ids = None self.path = None self.endpoint =", ":type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map = { 'start': {'key': 'start',", "issues with endpoints. The 'unknown' status shows that the IoT", "for a resource, allowing the user to update the tags", "~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required. A string that contains the IP", "_validation = { 'subject': {'readonly': True}, 'expiry': {'readonly': True}, 'thumbprint':", "include: \"unknown\", \"enqueued\", \"running\", \"completed\", \"failed\", \"cancelled\". :vartype status: str", "= kwargs.get('routing', None) self.storage_endpoints = kwargs.get('storage_endpoints', None) self.messaging_endpoints = kwargs.get('messaging_endpoints',", "used for proof of possession. :vartype verification_code: str :ivar certificate:", "{ 'value': {'key': 'value', 'type': '[EndpointHealthData]'}, 'next_link': {'key': 'nextLink', 'type':", "{ 'endpoint_id': {'key': 'endpointId', 'type': 'str'}, 'health_status': {'key': 'healthStatus', 'type':", "value for the enrichment property. :type value: str :param endpoint_names:", "a routing rule that your IoT hub uses to route", "Required. 
A string that contains the IP address range in", "shows that the endpoint is not accepting messages, after IoT", "self.twin = kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing all", "JSON-serialized array of Event Hub-compatible consumer group names with a", "sending a request. :param value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link:", "queue endpoint. :type connection_string: str :param name: Required. The name", "True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map = { 'connection_string': {'key': 'connectionString',", "of testing route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map = {", "in order to send to Azure. :param key_name: Required. The", "code that will be used for proof of possession. :vartype", "\"\"\"The JSON-serialized array of IotHubSkuDescription objects with a next link.", "next set of operation list results if there are any.", "'str'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, 'capacity': {'key': 'capacity', 'type':", "None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus queue", "route. :param result: Result of testing route. Possible values include:", "Azure Storage endpoint for file upload. All required parameters must", "list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map = { 'value': {'key': 'value', 'type': '[CertificateDescription]'},", "that the IoT hub routes the messages to, based on", "~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The list of Azure Storage endpoints where", "enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map = { 'endpoints': {'key': 'endpoints', 'type':", "ignored when sending a request. 
All required parameters must be", "scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation = { 'minimum': {'readonly':", "Azure Storage account and that MUST have its key as", "for cloud-to-device messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages.", "self.status = None self.failure_reason = None self.status_message = None self.parent_job_id", "sending a request. :param value: The array of quota metrics", "True}, 'max_value': {'readonly': True}, } _attribute_map = { 'name': {'key':", "'str'}, 'source': {'key': 'source', 'type': 'str'}, 'condition': {'key': 'condition', 'type':", "route error happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End where", "the IoT hub attempts to deliver a message on the", "its key as $default. Specifying more than one storage account", "value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The next link. :vartype next_link: str", "underscores, hyphens and has a maximum length of 64 characters.", "\"\"\"Result of the request to list IoT Hub operations. It", "None self.parent_job_id = None class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "name of the quota metric. :vartype name: str :ivar current_value:", "'subject', 'type': 'str'}, 'expiry': {'key': 'expiry', 'type': 'rfc-1123'}, 'thumbprint': {'key':", "'key': {'key': 'key', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'},", "Required. 
The list of endpoints to which messages that satisfy", "} def __init__( self, **kwargs ): super(Operation, self).__init__(**kwargs) self.name =", "kwargs.get('value', None) self.next_link = None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class MatchedRoute(msrest.serialization.Model):", "populated in order to send to Azure. :ivar id: The", "can use to secure a connection to the IoT hub.", "get the next set of results. Variables are only populated", "= { 'value': {'key': 'value', 'type': '[IotHubDescription]'}, 'next_link': {'key': 'nextLink',", "**kwargs ): super(EnrichmentProperties, self).__init__(**kwargs) self.key = kwargs['key'] self.value = kwargs['value']", "of tags. The resource tags. :type tags: dict[str, str] :param", "common properties of an Azure resource. Variables are only populated", "True}, 'description': {'readonly': True}, } _attribute_map = { 'provider': {'key':", "kwargs.get('features', None) self.locations = None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties.", "string of the event hub endpoint. :type connection_string: str :param", "expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type ttl_as_iso8601:", "self.type = None class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array of Certificate", "~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary and secondary location for iot hub.", "{provider}/{resource}/{read | write | action | delete}. :vartype name: str", "routing: The routing related properties of the IoT hub. See:", "str :param source: Required. The source that the routing rule", "sending a request. :param value: The list of shared access", "your IoT hub applies to messages delivered to endpoints. 
All", "Possible values include: \"F1\", \"S1\", \"S2\", \"S3\", \"B1\", \"B2\", \"B3\".", "self, **kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs) self.location = kwargs.get('location', None) self.role", ":type is_enabled: bool \"\"\" _validation = { 'source': {'required': True},", "to more results. :vartype next_link: str \"\"\" _validation = {", "} def __init__( self, **kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available =", "of Certificate objects. :param value: The array of Certificate objects.", "'str'}, 'app_properties': {'key': 'appProperties', 'type': '{str}'}, 'system_properties': {'key': 'systemProperties', 'type':", ":param subscription_id: The subscription identifier of the service bus queue", "is_verified: Determines whether certificate has been verified. :vartype is_verified: bool", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The next link. :vartype next_link:", "True}, 'updated': {'readonly': True}, } _attribute_map = { 'subject': {'key':", "IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity information. Variables are only populated by", "the provided name is available. :vartype name_available: bool :ivar reason:", "'min_items': 1}, 'is_enabled': {'required': True}, } _attribute_map = { 'name':", "'container_name': {'key': 'containerName', 'type': 'str'}, 'file_name_format': {'key': 'fileNameFormat', 'type': 'str'},", "{'key': 'twin', 'type': 'RoutingTwin'}, } def __init__( self, **kwargs ):", "~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The billing tier for the IoT hub.", "'capacity': {'required': True}, } _attribute_map = { 'resource_type': {'key': 'resourceType',", "class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related to the custom endpoints to", "type of the job. Possible values include: \"unknown\", \"export\", \"import\",", "The name of the quota metric. 
:vartype name: str :ivar", ":param exclude_keys: Required. The value indicating whether keys should be", "= kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing all routes.", "uses to route messages to built-in and custom endpoints. A", "will be ignored when sending a request. :param properties: The", "= kwargs.get('event_hubs', None) self.storage_containers = kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The", "'value': {'readonly': True}, 'next_link': {'readonly': True}, } _attribute_map = {", "'locations', 'type': '[IotHubLocationDescription]'}, } def __init__( self, **kwargs ): super(IotHubProperties,", "CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only populated by the", "ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number of times the IoT", "specified in the 'routes' section are met. This is an", "populated in order to send to Azure. :param input_blob_container_uri: Required.", "'route', 'type': 'RouteProperties'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def", "self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None)", "kwargs.get('desired', None) self.reported = kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties", "be populated in order to send to Azure. :param sas_ttl_as_iso8601:", "): super(TestRouteInput, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.route = kwargs['route']", "that MUST have its key as $default. Specifying more than", "'str'}, 'condition': {'key': 'condition', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type':", "is to be applied to, such as DeviceMessages. 
Possible values", "True}, 'type': {'readonly': True}, } _attribute_map = { 'properties': {'key':", "None self.type = None self.etag = None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The", "values include: \"primary\", \"secondary\". :type role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\"", "_validation = { 'name_available': {'readonly': True}, 'reason': {'readonly': True}, }", "'localized_value': {'key': 'localizedValue', 'type': 'str'}, } def __init__( self, **kwargs", "file or just .pem file content. :type certificate: str \"\"\"", ":vartype name: str :ivar type: The resource type. :vartype type:", "policy. :type key_name: str :param primary_key: The primary key. :type", "are routed to. Currently only 1 endpoint is allowed. :type", ":vartype etag: str :ivar type: The resource type. :vartype type:", "provided as a header per the normal ETag convention. :type", "super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model): \"\"\"The X509", "license information. # Code generated by Microsoft (R) AutoRest Code", "self, **kwargs ): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601", "str :param endpoint_names: Required. The list of endpoints for which", ":param location: Location where the route error happened. :type location:", "'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", ":vartype enabled_device_count: long :ivar disabled_device_count: The count of disabled devices", "the service bus topic endpoint. 
:type connection_string: str :param name:", "= { 'provider': {'readonly': True}, 'resource': {'readonly': True}, 'operation': {'readonly':", "{ 'line': {'key': 'line', 'type': 'int'}, 'column': {'key': 'column', 'type':", "parent_job_id: str \"\"\" _validation = { 'job_id': {'readonly': True}, 'start_time_utc':", "~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If True, file upload notifications are enabled.", "self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) self.container_name =", "thumbprint. :vartype thumbprint: str :ivar is_verified: Determines whether certificate has", "available. Variables are only populated by the server, and will", "subscription_id: The subscription identifier of the service bus topic endpoint.", "encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation = { 'connection_string': {'required':", "The error details. :vartype details: str \"\"\" _validation = {", "'type': {'key': 'type', 'type': 'str'}, } def __init__( self, **kwargs", "The certificate's subject name. :vartype subject: str :ivar expiry: The", "Used to specify whether the fallback route is enabled. :type", "shared access policies you can use to secure a connection", "If it is provided in the response body, it must", ":param condition: The condition which is evaluated in order to", "The detailed reason message. :type message: str \"\"\" _validation =", "source: Required. The source to which the routing rule is", "IotHubSkuDescription objects with a next link. Variables are only populated", "'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'verification_code': {'key':", "} _attribute_map = { 'value': {'key': 'value', 'type': '[Operation]'}, 'next_link':", "list of user-provided enrichments that the IoT hub applies to", "certificate's create date and time. 
:vartype created: ~datetime.datetime :ivar updated:", "IoT hub routes messages based on the routing rules. A", "The resource location. :type location: str :param tags: A set", "self.etag = kwargs.get('etag', None) self.properties = kwargs.get('properties', None) self.sku =", "name for the enrichment property. :type key: str :param value:", "the region, can be either primary or secondary. The primary", "of an X509 CA Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id:", ":type key: str :param value: Required. The value for the", "str] :ivar id: The Event Hub-compatible consumer group identifier. :vartype", "super(CertificateProperties, self).__init__(**kwargs) self.subject = None self.expiry = None self.thumbprint =", "testing route. All required parameters must be populated in order", "{'key': 'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type': 'int'}, 'encoding':", "_validation = { 'failover_region': {'required': True}, } _attribute_map = {", "\"\"\" _validation = { 'resource_type': {'readonly': True}, 'sku': {'required': True},", "str :param subscription_id: The subscription identifier of the service bus", "The condition that is evaluated to apply the routing rule.", "type of the resource. :vartype resource_type: str :param sku: Required.", "subscription quota response. Variables are only populated by the server,", "values. All required parameters must be populated in order to", "kwargs.get('service_bus_topics', None) self.event_hubs = kwargs.get('event_hubs', None) self.storage_containers = kwargs.get('storage_containers', None)", "duration for the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. 
:type lock_duration_as_iso8601: ~datetime.timedelta", "_attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key':", "{ 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( self,", "None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing route. All required parameters", ":vartype created: ~datetime.datetime :ivar updated: The certificate's last update date", "ip_mask: str \"\"\" _validation = { 'filter_name': {'required': True}, 'action':", "str :param app_properties: App properties. :type app_properties: dict[str, str] :param", ":ivar certificate: The certificate content. :vartype certificate: str \"\"\" _validation", "line: int :param column: Column where the route error happened.", "if the code is regenerated. # -------------------------------------------------------------------------- from azure.core.exceptions import", "happened. :type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map = { 'start': {'key':", "_attribute_map = { 'service_bus_queues': {'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key':", "sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation = { 'id': {'readonly': True}, 'name':", "'display': {'key': 'display', 'type': 'OperationDisplay'}, } def __init__( self, **kwargs", "\"\"\" _validation = { 'route': {'required': True}, } _attribute_map =", "} _attribute_map = { 'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules':", "and IoT Hub is retrying to send data to this", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'},", "cloud-to-device messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. 
:type", "= kwargs.get('comments', None) self.features = kwargs.get('features', None) self.locations = None", "values include: \"unknown\", \"healthy\", \"unhealthy\", \"dead\". :type health_status: str or", "list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The properties of the route that is", "list[str] :ivar path: The Event Hub-compatible name. :vartype path: str", "None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. :param body: Body of routing", "def __init__( self, **kwargs ): super(RouteProperties, self).__init__(**kwargs) self.name = kwargs['name']", "} _attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id':", "Health statuses have following meanings. The 'healthy' status shows that", "'status', 'type': 'str'}, 'failure_reason': {'key': 'failureReason', 'type': 'str'}, 'status_message': {'key':", "of an Azure resource. Variables are only populated by the", "'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key':", "number of provisioned IoT Hub units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity:", "IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables are only populated by the server,", "} def __init__( self, **kwargs ): super(ErrorDetails, self).__init__(**kwargs) self.code =", "of the operation. :vartype description: str \"\"\" _validation = {", "created: The certificate's create date and time. :vartype created: ~datetime.datetime", "The name of the certificate. :vartype name: str :ivar etag:", "per the normal ETag convention. 
:type etag: str :param properties:", "{'key': 'condition', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, 'is_enabled':", ":type certificate: str \"\"\" _validation = { 'subject': {'readonly': True},", "operations supported by the Microsoft.Devices resource provider. :vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation]", "class CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only populated by", "period of time for which a message is available to", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days: long :param partition_count: The number of partitions", "messages, after IoT Hub retried sending messages for the retrial", "= kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when", "= None self.current_value = None self.max_value = None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model):", "'type': 'FeedbackProperties'}, } def __init__( self, **kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs)", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int \"\"\" _validation = { 'max_delivery_count':", ":param service_bus_topics: The list of Service Bus topic endpoints that", "'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, } def", "populated in order to send to Azure. :param name: The", "__init__( self, **kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type = None self.sku", "self.next_link = None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched. 
:param properties:", "must also be provided as a header per the normal", "start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End where the route error happened.", "): super(TagsResource, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input", "normal ETag convention. :type etag: str :param properties: IotHub properties.", "= { 'job_id': {'readonly': True}, 'start_time_utc': {'readonly': True}, 'end_time_utc': {'readonly':", "a request. :ivar name: The name of the quota metric.", ":param failover_region: Required. Region the hub will be failed over", "\"\"\" _attribute_map = { 'start': {'key': 'start', 'type': 'RouteErrorPosition'}, 'end':", "routing rules. A maximum of 10 custom endpoints are allowed", "hub is currently provisioned. The secondary region is the Azure", "{'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count':", "'secondaryKey', 'type': 'str'}, 'rights': {'key': 'rights', 'type': 'str'}, } def", "rules are allowed for paid hubs and a maximum of", "has to be present in the dictionary while making create", "hub routes messages to, based on the routing rules. :type", "Route properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing Twin Reference.", "str] :param etag: The Etag field is *not* required. If", "class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting an import", "__init__( self, **kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name", "'str'}, } def __init__( self, **kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs) self.location", "of endpoints for which the enrichment is applied to the", "tags. Resource tags. :type tags: dict[str, str] \"\"\" _attribute_map =", "policy. 
All required parameters must be populated in order to", "unique across endpoint types. The name need not be the", "} _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, }", "access policies. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The next link.", "~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required. Route properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param", "= kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input parameter. This", "File name format for the blob. Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}.", "seconds. Default value is 300 seconds. :type batch_frequency_in_seconds: int :param", "'locations': {'key': 'locations', 'type': '[IotHubLocationDescription]'}, } def __init__( self, **kwargs", "= { 'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue',", "alphanumeric characters, periods, underscores, hyphens, has a maximum length of", "metrics to identify errors and monitor issues with endpoints. The", "the parent job, if any. :vartype parent_job_id: str \"\"\" _validation", "error message. :type message: str :param severity: Severity of the", "The resource identifier. :vartype id: str :ivar name: The name", "issued for the Proof-Of-Possession flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id:", "IoT Hub instance. :param tags: A set of tags. Resource", "} def __init__( self, **kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs) self.name =", "Hub-compatible consumer group name. :vartype name: str :ivar type: the", "the routing rule is to be applied to. For example,", "_validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'etag':", "= None self.start_time_utc = None self.end_time_utc = None self.type =", "where the route error happened. 
:type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map", "Required. The name of the IoT hub to check. :type", "path: The Event Hub-compatible name. :vartype path: str :ivar endpoint:", "'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, 'rights': {'key': 'rights', 'type':", "JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of JobResponse objects with a next", "state: str :ivar host_name: The name of the host. :vartype", "\"\"\"The properties of an enrichment that your IoT hub applies", "= { 'key_name': {'key': 'keyName', 'type': 'str'}, 'primary_key': {'key': 'primaryKey',", "JSON-serialized array of IotHubQuotaMetricInfo objects with a next link. Variables", "results. Variables are only populated by the server, and will", "'body': {'key': 'body', 'type': 'str'}, 'app_properties': {'key': 'appProperties', 'type': '{str}'},", "'type': 'str'}, 'app_properties': {'key': 'appProperties', 'type': '{str}'}, 'system_properties': {'key': 'systemProperties',", "resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required. IotHub capacity. :type", "class JobResponse(msrest.serialization.Model): \"\"\"The properties of the Job Response object. Variables", "None) self.partition_count = kwargs.get('partition_count', None) self.partition_ids = None self.path =", "{ 'name': {'key': 'name', 'type': 'str'}, } def __init__( self,", "to Azure. :param failover_region: Required. Region the hub will be", "{'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map = { 'connection_string': {'key':", "'type': 'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes',", "the enrichment property. :type key: str :param value: Required. The", "list of storage container endpoints that IoT hub routes messages", "Required. Region the hub will be failed over to. 
:type", "number of units. :vartype default: long :ivar scale_type: The type", "for the Proof-Of-Possession flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The", "used by this IoT hub. :param lock_duration_as_iso8601: The lock duration.", "'sku': {'required': True}, 'capacity': {'required': True}, } _attribute_map = {", "endpoint types for paid hubs and only 1 custom endpoint", "of all devices in the hub. All required parameters must", "export_blob_container_uri: Required. The export blob container URI. :type export_blob_container_uri: str", "should be excluded during export. :type exclude_keys: bool \"\"\" _validation", "include alphanumeric characters, periods, underscores, hyphens and has a maximum", "resource group of the service bus queue endpoint. :type resource_group:", "\"\"\" _attribute_map = { 'service_bus_queues': {'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics':", "self.tier = None self.capacity = kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model): \"\"\"The", "app_properties: dict[str, str] :param system_properties: System properties. :type system_properties: dict[str,", "provisioned Event Hub-compatible endpoint used by the IoT hub. Variables", "value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The next link. :vartype next_link: str", "REST API operation. Variables are only populated by the server,", "'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type',", "and will be ignored when sending a request. :param value:", "to an event hub endpoint. All required parameters must be", "class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubSkuDescription objects with a", "ignored when sending a request. :param retention_time_in_days: The retention time", ":vartype id: str :ivar name: The resource name. 
:vartype name:", "'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key':", "string of the service bus topic endpoint. :type connection_string: str", "\"\"\" _validation = { 'failover_region': {'required': True}, } _attribute_map =", "name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The billing tier for", ":param feedback: The properties of the feedback queue for cloud-to-device", "to, based on the routing rules. :type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param", "The time the job stopped processing. :vartype end_time_utc: ~datetime.datetime :ivar", "IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count:", "self).__init__(**kwargs) self.line = kwargs.get('line', None) self.column = kwargs.get('column', None) class", "self.next_link = None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics. Variables are", "send to Azure. :param sas_ttl_as_iso8601: The period of time for", "= kwargs.get('start', None) self.end = kwargs.get('end', None) class RouteProperties(msrest.serialization.Model): \"\"\"The", "'type': '[CertificateDescription]'}, } def __init__( self, **kwargs ): super(CertificateListDescription, self).__init__(**kwargs)", "host. :vartype host_name: str :param event_hub_endpoints: The Event Hub-compatible endpoint", "MIT License. See License.txt in the project root for license", "any. :vartype next_link: str \"\"\" _validation = { 'value': {'readonly':", "output_blob_container_uri: Required. The output blob container URI. :type output_blob_container_uri: str", "'str'}, } def __init__( self, **kwargs ): super(Name, self).__init__(**kwargs) self.value", "etag: str :param properties: IotHub properties. 
:type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param", "{'readonly': True}, 'state': {'readonly': True}, 'host_name': {'readonly': True}, 'locations': {'readonly':", "name. :type localized_value: str \"\"\" _attribute_map = { 'value': {'key':", "kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error details. Variables are only populated by", "def __init__( self, **kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "'type': 'str'}, 'exclude_keys': {'key': 'excludeKeys', 'type': 'bool'}, } def __init__(", "lost if the code is regenerated. # -------------------------------------------------------------------------- from azure.core.exceptions", "{ 'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items': 1, 'min_items':", "Microsoft.Devices resource provider. :vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL to", "self, **kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "The following names are reserved: events, fileNotifications, $default. Endpoint names", ":type output_blob_container_uri: str \"\"\" _validation = { 'input_blob_container_uri': {'required': True},", "will be ignored when sending a request. :ivar name_available: The", "= { 'endpoints': {'key': 'endpoints', 'type': 'RoutingEndpoints'}, 'routes': {'key': 'routes',", "endpoint. :type endpoint_id: str :param health_status: Health statuses have following", "files. The container need not exist but should be creatable", "'type': 'bool'}, 'reason': {'key': 'reason', 'type': 'str'}, 'message': {'key': 'message',", "array of Certificate objects. 
:param value: The array of Certificate", "key has to be present in the dictionary while making", "= None self.etag = None self.type = None class CertificateListDescription(msrest.serialization.Model):", "'endpoint', 'type': 'str'}, } def __init__( self, **kwargs ): super(EventHubProperties,", "kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of the feedback queue for", "A maximum of 10 custom endpoints are allowed across all", "class Resource(msrest.serialization.Model): \"\"\"The common properties of an Azure resource. Variables", "{'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, 'rights':", "is provisioned. :param location: The name of the Azure region.", "start_time_utc: The start time of the job. :vartype start_time_utc: ~datetime.datetime", "Default value is 'avro'. Possible values include: \"Avro\", \"AvroDeflate\", \"JSON\".", "properties. Variables are only populated by the server, and will", ":ivar created: The certificate's create date and time. :vartype created:", "None self.host_name = None self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None) self.routing =", "the storage account. :type connection_string: str :param name: Required. The", "self.name = None self.type = None self.etag = None class", "The 'dead' status shows that the endpoint is not accepting", "Required. 
The connection string of the service bus topic endpoint.", "_validation = { 'export_blob_container_uri': {'required': True}, 'exclude_keys': {'required': True}, }", "} def __init__( self, **kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count =", "): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601',", "'name': {'key': 'name', 'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'},", ":vartype name: str :ivar current_value: The current value for the", "{'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation':", "self.type = None self.etag = None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized", "Storage account to which files are uploaded. :type connection_string: str", "class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route errors. :param start: Start where", "TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing all routes. :param routing_source: Routing source.", "in order to send to Azure. :ivar id: The resource", "include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type source: str or", "resource_group: str :param container_name: Required. The name of storage container", "self.filter_name = kwargs['filter_name'] self.action = kwargs['action'] self.ip_mask = kwargs['ip_mask'] class", "All rights reserved. # Licensed under the MIT License. See", "will be ignored when sending a request. All required parameters", "list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The provisioning state. 
:vartype provisioning_state: str :ivar", "self, **kwargs ): super(RouteCompilationError, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.severity", "class OperationInputs(msrest.serialization.Model): \"\"\"Input values. All required parameters must be populated", "'str'}, } def __init__( self, **kwargs ): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days", "X509 CA Certificate. Variables are only populated by the server,", "is_enabled: bool \"\"\" _validation = { 'source': {'required': True}, 'endpoint_names':", "self, **kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type = None self.sku =", "None class OperationInputs(msrest.serialization.Model): \"\"\"Input values. All required parameters must be", "\"\"\" _attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'role':", "parent_job_id: The job identifier of the parent job, if any.", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int \"\"\" _validation = { 'max_delivery_count': {'maximum':", "\"\"\"The properties of an IoT hub. Variables are only populated", ":param max_delivery_count: The number of times the IoT hub attempts", "result: Result of testing route. Possible values include: \"undefined\", \"false\",", "Code Generator. # Changes may cause incorrect behavior and will", "of IotHubSkuDescription. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The next link.", "{'key': 'status', 'type': 'str'}, 'failure_reason': {'key': 'failureReason', 'type': 'str'}, 'status_message':", "| action | delete}. :vartype name: str :param display: The", ":param value: The array of IotHubDescription objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription]", "topic endpoint. :type connection_string: str :param name: Required. 
The name", "__init__( self, **kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors', None)", "the IoT hub applies to messages to be delivered to", "str :param localized_value: Localized value of name. :type localized_value: str", "of operations and a URL link to get the next", "type: The resource type. :vartype type: str \"\"\" _validation =", "of matched routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map = {", "metric. :vartype name: str :ivar current_value: The current value for", ":type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The list of Service Bus", "of the IoT hub. Variables are only populated by the", "request to list IoT Hub operations. It contains a list", "kwargs.get('type', None) self.unit = kwargs.get('unit', None) self.current_value = kwargs.get('current_value', None)", "hub. :param lock_duration_as_iso8601: The lock duration. See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. :type", "'RouteErrorPosition'}, 'end': {'key': 'end', 'type': 'RouteErrorPosition'}, } def __init__( self,", "a request. :ivar provider: Service provider: Microsoft Devices. :vartype provider:", "\"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". 
:type routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource", ":type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period of time for", "): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The", "super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None self.name", "self.display = kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model): \"\"\"The object that represents", "True}, 'location': {'required': True}, 'sku': {'required': True}, } _attribute_map =", "the IoT hub. :type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The", "10485760}, } _attribute_map = { 'connection_string': {'key': 'connectionString', 'type': 'str'},", "'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type':", "ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The provisioning state. :vartype provisioning_state: str", "__init__( self, **kwargs ): super(RouteErrorRange, self).__init__(**kwargs) self.start = kwargs.get('start', None)", "Azure. :param sas_ttl_as_iso8601: The period of time for which the", ":vartype path: str :ivar endpoint: The Event Hub-compatible endpoint. :vartype", "= kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class", "'status': {'readonly': True}, 'failure_reason': {'readonly': True}, 'status_message': {'readonly': True}, 'parent_job_id':", "kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model):", "Azure. :param failover_region: Required. 
Region the hub will be failed", "list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The list of Service Bus topic endpoints", "key. :type primary_key: str :param secondary_key: The secondary key. :type", "name is available. :vartype name_available: bool :ivar reason: The reason", "resource name. :vartype name: str :ivar type: The resource type.", "of the storage account. :type subscription_id: str :param resource_group: The", "{ 'subject': {'readonly': True}, 'expiry': {'readonly': True}, 'thumbprint': {'readonly': True},", "self.routes = kwargs.get('routes', None) self.fallback_route = kwargs.get('fallback_route', None) self.enrichments =", "hubs and only 1 custom endpoint is allowed across all", "in the identity registry. :vartype total_device_count: long :ivar enabled_device_count: The", "{'required': True}, 'value': {'required': True}, 'endpoint_names': {'required': True, 'min_items': 1},", "super(IotHubCapacity, self).__init__(**kwargs) self.minimum = None self.maximum = None self.default =", "class Name(msrest.serialization.Model): \"\"\"Name of Iot Hub type. :param value: IotHub", "of the resource group of the storage account. :type resource_group:", "end_time_utc: ~datetime.datetime :ivar type: The type of the job. Possible", "and will be ignored when sending a request. :ivar provider:", "order to send to Azure. :ivar id: The resource identifier.", "self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source', None) self.message = kwargs.get('message', None) self.twin", "for paid hubs and only 1 custom endpoint is allowed", "kwargs['name'] self.tier = None self.capacity = kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model):", ":param input_blob_container_uri: Required. The input blob container URI. :type input_blob_container_uri:", ":ivar start_time_utc: The start time of the job. :vartype start_time_utc:", "of the IoT hub. 
See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints: The properties", "\"\"\"The properties related to service bus topic endpoint types. All", "'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'},", "provided name is available. :vartype name_available: bool :ivar reason: The", "self, **kwargs ): super(RouteErrorRange, self).__init__(**kwargs) self.start = kwargs.get('start', None) self.end", "A string that contains the IP address range in CIDR", "partition_ids: list[str] :ivar path: The Event Hub-compatible name. :vartype path:", "None) self.reported = kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of", "self.created = None self.updated = None self.verification_code = None self.certificate", "= kwargs['action'] self.ip_mask = kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The properties of", "\"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". :vartype type: str", "self, **kwargs ): super(CertificateProperties, self).__init__(**kwargs) self.subject = None self.expiry =", "\"\"\"A container holding only the Tags for a resource, allowing", "{ 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True}, 'endpoint_names':", "should be creatable using the connectionString specified. 
:type container_name: str", "'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map = { 'connection_string': {'key': 'connectionString', 'type':", "'{str}'}, } def __init__( self, **kwargs ): super(TagsResource, self).__init__(**kwargs) self.tags", "None) self.messaging_endpoints = kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device", "route error. Possible values include: \"error\", \"warning\". :type severity: str", "'reason': {'readonly': True}, } _attribute_map = { 'name_available': {'key': 'nameAvailable',", "'column', 'type': 'int'}, } def __init__( self, **kwargs ): super(RouteErrorPosition,", "class RouteProperties(msrest.serialization.Model): \"\"\"The properties of a routing rule that your", "str :param batch_frequency_in_seconds: Time interval at which blobs are written", "{ 'endpoints': {'key': 'endpoints', 'type': 'RoutingEndpoints'}, 'routes': {'key': 'routes', 'type':", "to secure a connection to the IoT hub. :type authorization_policies:", "not exist but should be creatable using the connectionString specified.", "hub to check. :type name: str \"\"\" _validation = {", "name of the resource group of the storage account. :type", ":param event_hub_endpoints: The Event Hub-compatible endpoint properties. The only possible", "= None self.thumbprint = None self.is_verified = None self.created =", "{'key': 'properties', 'type': '{str}'}, 'id': {'key': 'id', 'type': 'str'}, 'name':", "IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter rules for the IoT hub. All", "def __init__( self, **kwargs ): super(Operation, self).__init__(**kwargs) self.name = None", "message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing Twin Reference. :type", "you upload files. The container need not exist but should", "\"S2\", \"S3\", \"B1\", \"B2\", \"B3\". 
:type name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku", "= None self.parent_job_id = None class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array", "'value', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, } def", ":type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation = { 'name': {'readonly': True},", "Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory but can", "IoT hub name is available. Variables are only populated by", "nonce issued for the Proof-Of-Possession flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar", "True}, } _attribute_map = { 'failover_region': {'key': 'failoverRegion', 'type': 'str'},", "IoT hub. Possible values include: \"None\", \"DeviceManagement\". :type features: str", "list does not include the built-in Event Hubs endpoint. :type", "EventHubProperties(msrest.serialization.Model): \"\"\"The properties of the provisioned Event Hub-compatible endpoint used", "kwargs['value'] self.endpoint_names = kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error details. Variables are", "True}, 'operation': {'readonly': True}, 'description': {'readonly': True}, } _attribute_map =", "storage container endpoint. All required parameters must be populated in", "str :ivar etag: The entity tag. :vartype etag: str :ivar", "certificate: base-64 representation of X509 certificate .cer file or just", "devices in the identity registry. :vartype total_device_count: long :ivar enabled_device_count:", "{ 'name': {'readonly': True}, } _attribute_map = { 'name': {'key':", "of the service bus queue endpoint. 
:type subscription_id: str :param", "{'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, } def __init__( self, **kwargs ):", "} def __init__( self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string =", "self.condition = kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled']", ":type location: str :param role: The role of the region,", ":vartype type: str \"\"\" _validation = { 'id': {'readonly': True},", "self, **kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.source", "**kwargs ): super(RoutingProperties, self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints', None) self.routes =", "is not set, the messages which do not meet any", "None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input parameter. This is an", "None self.updated = None self.certificate = kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model):", "\"firmwareUpdate\". :vartype type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The status", "EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of an enrichment that your IoT hub", "cloud_to_device: The IoT hub cloud-to-device messaging properties. 
:type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties", "None self.etag = None self.type = None class CertificateListDescription(msrest.serialization.Model): \"\"\"The", "{'key': 'name', 'type': 'str'}, 'source': {'key': 'source', 'type': 'str'}, 'condition':", "'str'}, } def __init__( self, **kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate", "_attribute_map = { 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'name': {'key':", "_validation = { 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required':", "'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'},", "of testing a route. :param compilation_errors: JSON-serialized list of route", "None) self.localized_value = kwargs.get('localized_value', None) class Operation(msrest.serialization.Model): \"\"\"IoT Hub REST", "result of testing a route. :param compilation_errors: JSON-serialized list of", "maximum length of 64 characters, and must be unique. :type", "routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The list of Azure Storage endpoints", "The certificate's expiration date and time. :vartype expiry: ~datetime.datetime :ivar", "{'key': 'key', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'endpoint_names':", "None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of testing a route. :param", "will be used for proof of possession. :vartype verification_code: str", "access policy. Possible values include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead,", "of the scaling enabled. 
Possible values include: \"Automatic\", \"Manual\", \"None\".", "self.updated = None self.certificate = kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The", "DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\",", "units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity: long \"\"\" _validation = {", "class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the route error happened. :param line:", "'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'},", "which the messages that satisfy the condition are routed to.", "super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None)", "__init__( self, **kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model):", "route: Required. Route properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing", "'[JobResponse]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "'type': 'object'}, 'reported': {'key': 'reported', 'type': 'object'}, } def __init__(", "sas_ttl_as_iso8601: The period of time for which the SAS URI", "of the quota metric. :vartype name: str :ivar current_value: The", "to. Currently only 1 endpoint is allowed. 
:type endpoint_names: list[str]", "'long'}, 'scale_type': {'key': 'scaleType', 'type': 'str'}, } def __init__( self,", "'failover_region': {'required': True}, } _attribute_map = { 'failover_region': {'key': 'failoverRegion',", "'tags': {'key': 'tags', 'type': 'object'}, 'properties': {'key': 'properties', 'type': 'RoutingTwinProperties'},", "'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'verification_code': {'key': 'verificationCode', 'type':", "None) self.unit = kwargs.get('unit', None) self.current_value = kwargs.get('current_value', None) self.limit", "__init__( self, **kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None)", "files. Currently you can configure only one Azure Storage account", "def __init__( self, **kwargs ): super(CertificateListDescription, self).__init__(**kwargs) self.value = kwargs.get('value',", "The count of disabled devices in the identity registry. :vartype", "kwargs.get('routing_source', None) self.message = kwargs.get('message', None) self.twin = kwargs.get('twin', None)", "'endpointId', 'type': 'str'}, 'health_status': {'key': 'healthStatus', 'type': 'str'}, } def", "values include: \"Free\", \"Standard\", \"Basic\". :vartype tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier", "'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit': {'key':", "'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key':", "policies. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The next link. :vartype", "properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The resource identifier. :vartype id: str", "including the challenge nonce issued for the Proof-Of-Possession flow. 
:type", "'str'}, 'role': {'key': 'role', 'type': 'str'}, } def __init__( self,", "None self.state = None self.host_name = None self.event_hub_endpoints = kwargs.get('event_hub_endpoints',", "kwargs.get('cloud_to_device', None) self.comments = kwargs.get('comments', None) self.features = kwargs.get('features', None)", "account. :type subscription_id: str :param resource_group: The name of the", "'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs)", "super(FailoverInput, self).__init__(**kwargs) self.failover_region = kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of", "): super(Name, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.localized_value = kwargs.get('localized_value',", "that IoT hub routes the messages to, based on the", "_attribute_map = { 'endpoints': {'key': 'endpoints', 'type': 'RoutingEndpoints'}, 'routes': {'key':", "The reason for unavailability. Possible values include: \"Invalid\", \"AlreadyExists\". :vartype", "identifier of the service bus topic endpoint. :type subscription_id: str", "\"\"\" _validation = { 'name': {'readonly': True}, 'current_value': {'readonly': True},", "None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the Azure Storage endpoint", "Required. The output blob container URI. :type output_blob_container_uri: str \"\"\"", "blob container URI. :type input_blob_container_uri: str :param output_blob_container_uri: Required. The", "permissions assigned to the shared access policy. Possible values include:", "The value which indicates whether the provided name is available.", ":type health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map = { 'endpoint_id':", "routing rules are allowed for paid hubs and a maximum", "class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. 
:param body: Body of routing message.", "{'readonly': True}, } _attribute_map = { 'provider': {'key': 'provider', 'type':", "files are uploaded. :type connection_string: str :param container_name: Required. The", "are allowed across all endpoint types for paid hubs and", "that the routing rule is to be applied to, such", "object :param properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map = {", "{'key': 'name', 'type': 'Name'}, } def __init__( self, **kwargs ):", "host_name: str :param event_hub_endpoints: The Event Hub-compatible endpoint properties. The", "'str'}, 'failure_reason': {'key': 'failureReason', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type':", "str \"\"\" _validation = { 'filter_name': {'required': True}, 'action': {'required':", "expiry: The certificate's expiration date and time. :vartype expiry: ~datetime.datetime", "names must be unique across endpoint types. :type name: str", "where you can upload files. Currently you can configure only", "'str'}, } def __init__( self, **kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri", "connection with the endpoint. No messages have been delivered to", "registry statistics. Variables are only populated by the server, and", "testing route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map = { 'result':", "certificate: The certificate content. :type certificate: str \"\"\" _validation =", ":vartype failure_reason: str :ivar status_message: The status message for the", "Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map = { 'routing_source':", "{'readonly': True}, 'parent_job_id': {'readonly': True}, } _attribute_map = { 'job_id':", "JobResponse objects with a next link. Variables are only populated", "parameter. This is an optional parameter. 
:param tags: A set", "conditions specified in the 'routes' section get routed to the", "end_time_utc: The time the job stopped processing. :vartype end_time_utc: ~datetime.datetime", "name of the Azure region. :type location: str :param role:", "name of the SKU. Possible values include: \"F1\", \"S1\", \"S2\",", "route. All required parameters must be populated in order to", "super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[UserSubscriptionQuota]'},", "{ 'code': {'key': 'code', 'type': 'str'}, 'http_status_code': {'key': 'httpStatusCode', 'type':", "rule. :type filter_name: str :param action: Required. The desired action", "does not include the built-in Event Hubs endpoint. :type event_hubs:", "by IoT Hub for file upload is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-", "self.output_blob_container_uri = kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity information. 
Variables", "= kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related to an", "self.status_message = None self.parent_job_id = None class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized", "True}, } _attribute_map = { 'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type': 'str'},", "'properties': {'key': 'properties', 'type': 'RouteProperties'}, } def __init__( self, **kwargs", "fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The list of user-provided enrichments that", "exclude_keys: bool \"\"\" _validation = { 'export_blob_container_uri': {'required': True}, 'exclude_keys':", "__init__( self, **kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "number of units. :vartype minimum: long :ivar maximum: The maximum", "to Azure. :param message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param", "'file_name_format': {'key': 'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type': 'int'},", "routes: JSON-serialized array of matched routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\"", "\"S3\", \"B1\", \"B2\", \"B3\". :type name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar", "only populated by the server, and will be ignored when", "self, **kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name =", "self.created = None self.updated = None self.certificate = kwargs.get('certificate', None)", "Default value is 300 seconds. 
:type batch_frequency_in_seconds: int :param max_chunk_size_in_bytes:", "= kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format', None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None)", "self, **kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs) self.name = kwargs['name'] self.tier =", "in order to send to Azure. :param failover_region: Required. Region", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The next link. :vartype next_link:", "'str'}, } def __init__( self, **kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value", "ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\". :type rights: str or", "**kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired = kwargs.get('desired', None) self.reported =", "Resource Type: IotHubs. :vartype resource: str :ivar operation: Name of", "kwargs.get('authorization_policies', None) self.ip_filter_rules = kwargs.get('ip_filter_rules', None) self.provisioning_state = None self.state", "the Job Response object. Variables are only populated by the", "{ 'key_name': {'key': 'keyName', 'type': 'str'}, 'primary_key': {'key': 'primaryKey', 'type':", "'JSON'. Default value is 'avro'. Possible values include: \"Avro\", \"AvroDeflate\",", "'{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type':", "_validation = { 'route': {'required': True}, } _attribute_map = {", "access policy. All required parameters must be populated in order", "a list of operations and a URL link to get", "before it is expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-", "has a maximum length of 64 characters. 
The following names", "'type': {'key': 'type', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'},", "containing the reason for the failure. :vartype failure_reason: str :ivar", "fallback route. IoT Hub uses these properties when it routes", "True}, } _attribute_map = { 'job_id': {'key': 'jobId', 'type': 'str'},", "super(OperationInputs, self).__init__(**kwargs) self.name = kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result of the", "paid hubs and only 1 custom endpoint is allowed across", "the feedback queue for cloud-to-device messages. :type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\"", "endpoints where you can upload files. Currently you can configure", "of tags. Resource tags. :type tags: dict[str, str] \"\"\" _attribute_map", "not established a connection with the endpoint. No messages have", "True, 'min_items': 1}, } _attribute_map = { 'key': {'key': 'key',", "{'key': 'hostName', 'type': 'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing':", "IotHubProperties(msrest.serialization.Model): \"\"\"The properties of an IoT hub. Variables are only", "long :ivar scale_type: The type of the scaling enabled. Possible", "str \"\"\" _validation = { 'provider': {'readonly': True}, 'resource': {'readonly':", "job_id: str :ivar start_time_utc: The start time of the job.", "possible keys to this dictionary is events. This key has", "list[str] :param is_enabled: Required. Used to specify whether the fallback", ":param enrichments: The list of user-provided enrichments that the IoT", ":ivar updated: The certificate's last update date and time. :vartype", "to get the next set of results. 
Variables are only", "~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map = { 'endpoint_id': {'key': 'endpointId', 'type': 'str'},", "class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of one of the locations where", "messages based on the routing rules. A maximum of 10", "_validation = { 'provider': {'readonly': True}, 'resource': {'readonly': True}, 'operation':", "end: End where the route error happened. :type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition", "None) self.event_hubs = kwargs.get('event_hubs', None) self.storage_containers = kwargs.get('storage_containers', None) class", "super(EnrichmentProperties, self).__init__(**kwargs) self.key = kwargs['key'] self.value = kwargs['value'] self.endpoint_names =", "= kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service", "role of the region, can be either primary or secondary.", "None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the messaging endpoints used", "True}, 'etag': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map =", "these properties when it routes messages to the fallback endpoint.", "} def __init__( self, **kwargs ): super(Resource, self).__init__(**kwargs) self.id =", ":param action: Required. The desired action for requests captured by", "across all endpoint types for free hubs. :param service_bus_queues: The", "} def __init__( self, **kwargs ): super(EnrichmentProperties, self).__init__(**kwargs) self.key =", "characters. The following names are reserved: events, fileNotifications, $default. Endpoint", "'avro', 'avrodeflate', and 'JSON'. Default value is 'avro'. 
Possible values", "class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting an export", "{'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True}, 'endpoint_names': {'required': True,", "of the shared access policy. :type key_name: str :param primary_key:", "IoT hub. All required parameters must be populated in order", "mandatory but can be reordered. :type file_name_format: str :param batch_frequency_in_seconds:", "'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId',", "type. :type limit: int :param name: IotHub type. :type name:", "self.type = None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub cloud-to-device messaging", "The certificate's last update date and time. :vartype updated: ~datetime.datetime", "device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. :type max_delivery_count: int :param default_ttl_as_iso8601:", "None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics. Variables are only populated", "include: \"primary\", \"secondary\". :type role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map", "Supported values are 'avro', 'avrodeflate', and 'JSON'. Default value is", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number of", "self).__init__(**kwargs) self.name_available = None self.reason = None self.message = kwargs.get('message',", "rule. If no condition is provided, it evaluates to true", "self.start_time_utc = None self.end_time_utc = None self.type = None self.status", "Operation(msrest.serialization.Model): \"\"\"IoT Hub REST API operation. 
Variables are only populated", "= kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of User subscription", "are allowed for paid hubs and a maximum of 5", "http_status_code: The HTTP status code. :vartype http_status_code: str :ivar message:", "identifier. :vartype id: str :ivar name: The name of the", "): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.partition_count = kwargs.get('partition_count',", "enabled_device_count: long :ivar disabled_device_count: The count of disabled devices in", "an optional parameter. When this property is not set, the", "objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map = { 'value': {'key':", "): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies', None) self.ip_filter_rules = kwargs.get('ip_filter_rules',", "'updated': {'readonly': True}, 'verification_code': {'readonly': True}, 'certificate': {'readonly': True}, }", "the Tags for a resource, allowing the user to update", "on the routing rules. :type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The", "The IoT hub cloud-to-device messaging properties. :type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param", "an IoT hub shared access policy. 
All required parameters must", "'str'}, } def __init__( self, **kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601", "self.resource_type = None self.sku = kwargs['sku'] self.capacity = kwargs['capacity'] class", "'message', 'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubNameAvailabilityInfo,", "self.line = kwargs.get('line', None) self.column = kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model):", "UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of User subscription quota response. Variables are", "rule. :type ip_mask: str \"\"\" _validation = { 'filter_name': {'required':", "{'key': 'routingSource', 'type': 'str'}, 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'twin':", "str :ivar details: The error details. :vartype details: str \"\"\"", "registry. :vartype enabled_device_count: long :ivar disabled_device_count: The count of disabled", "resource group of the storage account. :type resource_group: str :param", "will be updated to healthy when IoT Hub has established", "): super(TestRouteResult, self).__init__(**kwargs) self.result = kwargs.get('result', None) self.details = kwargs.get('details',", "**kwargs ): super(JobResponseListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "the resource group of the service bus queue endpoint. :type", "{'key': 'verificationCode', 'type': 'str'}, 'certificate': {'key': 'certificate', 'type': 'str'}, }", "JSON-serialized array of matched routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map", "**kwargs ): super(Operation, self).__init__(**kwargs) self.name = None self.display = kwargs.get('display',", "{'readonly': True}, 'name': {'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True},", "met. This is an optional parameter. 
When this property is", "condition which is evaluated in order to apply the fallback", "the challenge nonce issued for the Proof-Of-Possession flow. Variables are", "JSON-serialized array of IotHubDescription objects with a next link. Variables", "Azure. :param filter_name: Required. The name of the IP filter", "error to be thrown. Not specifying a value for this", "are met. This is an optional parameter. When this property", "health_status: Health statuses have following meanings. The 'healthy' status shows", "has been verified. :vartype is_verified: bool :ivar created: The certificate's", "of IotHub type. :type unit: str :param current_value: Current number", "= { 'name_available': {'readonly': True}, 'reason': {'readonly': True}, } _attribute_map", "= kwargs.get('endpoint_id', None) self.health_status = kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The", "} def __init__( self, **kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri =", ":param role: The role of the region, can be either", "super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None)", "status of an unhealthy endpoint will be updated to healthy", "super(UserSubscriptionQuota, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.type = kwargs.get('type', None)", "is used as a fall-back route when none of the", "'type': 'long'}, } def __init__( self, **kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs)", "kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubDescription objects with", "to Azure. :ivar id: The resource identifier. 
:vartype id: str", "{'readonly': True}, } _attribute_map = { 'job_id': {'key': 'jobId', 'type':", "Hub metrics to identify errors and monitor issues with endpoints.", "file upload is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601: ~datetime.timedelta", "List of consumer groups objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link:", "'end', 'type': 'RouteErrorPosition'}, } def __init__( self, **kwargs ): super(RouteErrorRange,", "Event Hub-compatible name. :vartype path: str :ivar endpoint: The Event", "'compilationErrors', 'type': '[RouteCompilationError]'}, } def __init__( self, **kwargs ): super(TestRouteResultDetails,", "in the dictionary while making create or update calls for", "location: Location where the route error happened. :type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange", "kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter rules for the", "populated in order to send to Azure. :param sas_ttl_as_iso8601: The", "Event Hub-compatible endpoint. :vartype endpoint: str \"\"\" _validation = {", "'expiry': {'key': 'expiry', 'type': 'rfc-1123'}, 'thumbprint': {'key': 'thumbprint', 'type': 'str'},", "for the Proof-Of-Possession flow. Variables are only populated by the", "'updated', 'type': 'rfc-1123'}, 'verification_code': {'key': 'verificationCode', 'type': 'str'}, 'certificate': {'key':", "of the root container where you upload files. The container", "= kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter rules for", "the endpoint. No messages have been delivered to or rejected", ":vartype is_verified: bool :ivar created: The certificate's create date and", "rules. 
:type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The list of Event", "value indicating whether keys should be excluded during export. :type", "\"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource", "~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map = { 'result': {'key': 'result', 'type': 'str'},", "{'key': 'details', 'type': 'TestRouteResultDetails'}, } def __init__( self, **kwargs ):", "EndpointHealthData objects with a next link. Variables are only populated", "int :param default_ttl_as_iso8601: The default time to live for cloud-to-device", ":type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map = { 'id': {'key': 'id',", "{'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, } _attribute_map", "self).__init__(**kwargs) self.key = kwargs['key'] self.value = kwargs['value'] self.endpoint_names = kwargs['endpoint_names']", "of JobResponse objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The next", "None self.etag = None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "} _attribute_map = { 'resource_type': {'key': 'resourceType', 'type': 'str'}, 'sku':", ":vartype scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation = { 'minimum':", "endpoints to which your IoT hub routes messages based on", "= None self.state = None self.host_name = None self.event_hub_endpoints =", "'type': 'duration'}, 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'container_name': {'key': 'containerName',", "{'key': 'display', 'type': 'OperationDisplay'}, } def __init__( self, **kwargs ):", "self.twin = kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing one", "'sku', 'type': 'IotHubSkuInfo'}, 'capacity': {'key': 'capacity', 'type': 'IotHubCapacity'}, } def", "None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing all routes. :param routing_source:", ":type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The list of user-provided routing", "order to send to Azure. :param name: The name of", "= kwargs.get('message', None) self.route = kwargs['route'] self.twin = kwargs.get('twin', None)", "= None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf certificate. :param certificate:", "you can configure only one Azure Storage account and that", "Endpoint names must be unique across endpoint types. 
The name", "= kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing one route.", "super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id =", "'path': {'readonly': True}, 'endpoint': {'readonly': True}, } _attribute_map = {", "tier for the IoT hub. Possible values include: \"Free\", \"Standard\",", "'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs)", "messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. :type max_delivery_count:", "IP address range in CIDR notation for the rule. :type", "be populated in order to send to Azure. :param export_blob_container_uri:", "route. :param message: Route error message. :type message: str :param", "'str'}, 'details': {'key': 'details', 'type': 'TestRouteResultDetails'}, } def __init__( self,", "kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The health data for an endpoint.", "'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'},", "access policies with a next link. Variables are only populated", "to update the tags on an IoT Hub instance. :param", "details. Variables are only populated by the server, and will", "properties. The only possible keys to this dictionary is events.", "Location where the route error happened. :type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\"", "{'key': 'partitionIds', 'type': '[str]'}, 'path': {'key': 'path', 'type': 'str'}, 'endpoint':", "subscription identifier of the event hub endpoint. :type subscription_id: str", "where the route error happened. 
:param line: Line where the", "= { 'value': {'key': 'value', 'type': '[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink',", "IoT hub applies to messages delivered to endpoints. All required", "in order to send to Azure. :param name: The name", "{'key': 'tags', 'type': 'object'}, 'properties': {'key': 'properties', 'type': 'RoutingTwinProperties'}, }", "class TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing one route. :param result: Result", "dict[str, str] \"\"\" _validation = { 'id': {'readonly': True}, 'name':", "self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri = kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub", "meanings. The 'healthy' status shows that the endpoint is accepting", "= kwargs.get('value', None) self.next_link = None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about", "\"\"\" _attribute_map = { 'endpoint_id': {'key': 'endpointId', 'type': 'str'}, 'health_status':", "properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map = { 'properties': {'key': 'properties', 'type':", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The list of Azure", "The output blob container URI. :type output_blob_container_uri: str \"\"\" _validation", "'str'}, 'status': {'key': 'status', 'type': 'str'}, 'failure_reason': {'key': 'failureReason', 'type':", "= None self.max_value = None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array", "'[IotHubLocationDescription]'}, } def __init__( self, **kwargs ): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies", "tags on an IoT Hub instance. :param tags: A set", "where the route error happened. 
:type column: int \"\"\" _attribute_map", "host_name: The name of the host. :vartype host_name: str :param", "error when evaluating route. :param message: Route error message. :type", "True}, 'expiry': {'readonly': True}, 'thumbprint': {'readonly': True}, 'is_verified': {'readonly': True},", "in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type default_ttl_as_iso8601: ~datetime.timedelta", "'provisioningState', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, 'host_name': {'key':", "'type': '[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'state': {'key': 'state',", "messages in the Event Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type", "job identifier. :vartype job_id: str :ivar start_time_utc: The start time", "'type': 'str'}, 'condition': {'key': 'condition', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames',", ":type name: str :param source: Required. The source that the", "condition is not provided it will evaluate to true by", "the Azure disaster recovery (DR) paired region and also the", "None) self.provisioning_state = None self.state = None self.host_name = None", "kwargs.get('properties', None) self.id = None self.name = None self.etag =", "= kwargs.get('file_name_format', None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes',", "fallback endpoint. All required parameters must be populated in order", "= { 'minimum': {'key': 'minimum', 'type': 'long'}, 'maximum': {'key': 'maximum',", "only 1 endpoint is allowed. 
:type endpoint_names: list[str] :param is_enabled:", "features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary and secondary location", "{'maximum': 524288000, 'minimum': 10485760}, } _attribute_map = { 'connection_string': {'key':", "None) self.limit = kwargs.get('limit', None) self.name = kwargs.get('name', None) class", "True}, } _attribute_map = { 'subject': {'key': 'subject', 'type': 'str'},", "__init__( self, **kwargs ): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id', None)", "self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties", "'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'},", "container endpoint. All required parameters must be populated in order", "type: The type of the job. Possible values include: \"unknown\",", "which the routing rule is to be applied to. For", ":ivar resource: Resource Type: IotHubs. :vartype resource: str :ivar operation:", "hub. :vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation = { 'provisioning_state': {'readonly':", ":type action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required. A string", "None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubQuotaMetricInfo objects with", "which your IoT hub routes messages based on the routing", ":type subscription_id: str :param resource_group: The name of the resource", "enableFileUploadNotifications property is set to True, causes an error to", "class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubDescription objects with a", "of IotHubDescription objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The next", "int :param name: IotHub type. :type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map", "None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of one of the locations", "self, **kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired = kwargs.get('desired', None) self.reported", ":param resource_group: The name of the resource group of the", "blobs. Supported values are 'avro', 'avrodeflate', and 'JSON'. Default value", "None self.etag = None self.type = None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints: The properties related to the custom endpoints", "{'key': 'updated', 'type': 'rfc-1123'}, 'certificate': {'key': 'certificate', 'type': 'str'}, }", "self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None self.name =", "Required. The name of the root container where you upload", "IoT hub can failover to. Possible values include: \"primary\", \"secondary\".", "sending a request. :param value: The array of IotHubSkuDescription. :type", "super(RouteCompilationError, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.severity = kwargs.get('severity', None)", ":param endpoint_names: Required. The list of endpoints to which the", "and will be ignored when sending a request. :param properties:", "tags: dict[str, str] :param etag: The Etag field is *not*", "{ 'message': {'key': 'message', 'type': 'str'}, 'severity': {'key': 'severity', 'type':", "are uploaded. :type connection_string: str :param container_name: Required. The name", "be between 60 and 720 seconds. 
Default value is 300", "IoT hub attempts to deliver a message on the feedback", "dictionary while making create or update calls for the IoT", "route that is used as a fall-back route when none", "of IotHubQuotaMetricInfo objects with a next link. Variables are only", "name: str :param subscription_id: The subscription identifier of the service", "'rights': {'key': 'rights', 'type': 'str'}, } def __init__( self, **kwargs", "None self.status_message = None self.parent_job_id = None class JobResponseListResult(msrest.serialization.Model): \"\"\"The", "one endpoint is allowed. :type endpoint_names: list[str] :param is_enabled: Required.", "The name that identifies this endpoint. The name can only", "the code is regenerated. # -------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError", ":type reported: object \"\"\" _attribute_map = { 'desired': {'key': 'desired',", "True, file upload notifications are enabled. :type enable_file_upload_notifications: bool :param", "**kwargs ): super(IpFilterRule, self).__init__(**kwargs) self.filter_name = kwargs['filter_name'] self.action = kwargs['action']", "_validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type':", "True}, 'rights': {'required': True}, } _attribute_map = { 'key_name': {'key':", "} _attribute_map = { 'failover_region': {'key': 'failoverRegion', 'type': 'str'}, }", "coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights", "endpoint types. :type name: str :param subscription_id: The subscription identifier", "key: str :param value: Required. The value for the enrichment", "= kwargs.get('etag', None) self.properties = kwargs.get('properties', None) self.sku = kwargs['sku']", "Azure. :param name: Required. The name of the SKU. 
Possible", "self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count =", "{'readonly': True}, 'status_message': {'readonly': True}, 'parent_job_id': {'readonly': True}, } _attribute_map", "that satisfy the condition are routed to. Currently only 1", "Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type routing_source:", "'RoutingTwinProperties'}, } def __init__( self, **kwargs ): super(RoutingTwin, self).__init__(**kwargs) self.tags", ":param routing_source: Routing source. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\",", "EndpointHealthData(msrest.serialization.Model): \"\"\"The health data for an endpoint. :param endpoint_id: Id", "message: The detailed reason message. :type message: str \"\"\" _validation", "'job_id': {'key': 'jobId', 'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'rfc-1123'},", "'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type':", "list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The list of storage container endpoints that", "The 'unhealthy' status shows that the endpoint is not accepting", ":ivar message: The error message. :vartype message: str :ivar details:", "= kwargs.get('value', None) self.localized_value = kwargs.get('localized_value', None) class Operation(msrest.serialization.Model): \"\"\"IoT", "} def __init__( self, **kwargs ): super(OperationInputs, self).__init__(**kwargs) self.name =", "kwargs.get('value', None) self.localized_value = kwargs.get('localized_value', None) class Operation(msrest.serialization.Model): \"\"\"IoT Hub", "= { 'key': {'key': 'key', 'type': 'str'}, 'value': {'key': 'value',", "under the MIT License. See License.txt in the project root", "the IoT hub. 
Variables are only populated by the server,", "{ 'max_delivery_count': {'maximum': 100, 'minimum': 1}, } _attribute_map = {", "instance. :param tags: A set of tags. Resource tags. :type", ":param severity: Severity of the route error. Possible values include:", "job. :vartype status_message: str :ivar parent_job_id: The job identifier of", ":ivar provisioning_state: The provisioning state. :vartype provisioning_state: str :ivar state:", "request. :param value: The array of IotHubDescription objects. :type value:", "populated in order to send to Azure. :param name: Required.", "message. :param body: Body of routing message. :type body: str", "_attribute_map = { 'minimum': {'key': 'minimum', 'type': 'long'}, 'maximum': {'key':", "def __init__( self, **kwargs ): super(RoutingMessage, self).__init__(**kwargs) self.body = kwargs.get('body',", "will be failed over to. :type failover_region: str \"\"\" _validation", "\"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite,", "routing rules. This list does not include the built-in Event", "cloud-to-device messages. 
:type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation = { 'max_delivery_count':", "True}, 'scale_type': {'readonly': True}, } _attribute_map = { 'minimum': {'key':", "result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed result of testing", "None self.max_value = None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "{'key': 'endpoints', 'type': 'RoutingEndpoints'}, 'routes': {'key': 'routes', 'type': '[RouteProperties]'}, 'fallback_route':", "kwargs.get('start', None) self.end = kwargs.get('end', None) class RouteProperties(msrest.serialization.Model): \"\"\"The properties", "super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.partition_count = kwargs.get('partition_count', None)", "kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key', None) self.rights = kwargs['rights'] class", "device-to-cloud messages in the Event Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages.", "the routing rules. This list does not include the built-in", "written to storage. Value should be between 10485760(10MB) and 524288000(500MB).", "written to storage. Value should be between 60 and 720", "'etag': {'readonly': True}, } _attribute_map = { 'properties': {'key': 'properties',", "be ignored when sending a request. :ivar name: The name", ":ivar scale_type: The type of the scaling enabled. 
Possible values", "'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'int'},", "kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device = kwargs.get('cloud_to_device', None)", "None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the SKU of the IoT", "'minimum': 1}, } _attribute_map = { 'max_delivery_count': {'key': 'maxDeliveryCount', 'type':", "maximum of 10 custom endpoints are allowed across all endpoint", "time for device-to-cloud messages in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days:", "on the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count: int", "update date and time. :vartype updated: ~datetime.datetime :param certificate: The", "'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit': {'key': 'limit',", "certificate's verification code that will be used for proof of", "~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation = { 'key_name': {'required': True}, 'rights': {'required':", "for the enrichment property. :type key: str :param value: Required.", "'action': {'required': True}, 'ip_mask': {'required': True}, } _attribute_map = {", "'tags', 'type': 'object'}, 'properties': {'key': 'properties', 'type': 'RoutingTwinProperties'}, } def", "of the messaging endpoints used by this IoT hub. :param", "this rule. Possible values include: \"Accept\", \"Reject\". 
:type action: str", "= kwargs.get('tags', None) self.properties = kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties.", "**kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.type =", "self.ip_filter_rules = kwargs.get('ip_filter_rules', None) self.provisioning_state = None self.state = None", "resource_type: The type of the resource. :vartype resource_type: str :param", "'str'}, } def __init__( self, **kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate", "captured by this rule. Possible values include: \"Accept\", \"Reject\". :type", "verification_code: The certificate's verification code that will be used for", "{ 'key': {'required': True}, 'value': {'required': True}, 'endpoint_names': {'required': True,", "{'key': 'name', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, 'max_value':", "eventhub endpoint. :type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The list of", "JSON-serialized list of route compilation errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\"", ":vartype current_value: long :ivar max_value: The maximum value of the", "in the identity registry. :vartype disabled_device_count: long \"\"\" _validation =", "count of devices in the identity registry. :vartype total_device_count: long", "be present in the dictionary while making create or update", "are allowed for free hubs. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route:", "of Iot Hub type. :param value: IotHub type. :type value:", "name: The Event Hub-compatible consumer group name. :vartype name: str", "\"\"\"The X509 Certificate. Variables are only populated by the server,", "failure. 
:vartype failure_reason: str :ivar status_message: The status message for", "populated in order to send to Azure. :param filter_name: Required.", "'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs)", "Body of routing message. :type body: str :param app_properties: App", "The next link. :vartype next_link: str \"\"\" _validation = {", "'resource': {'readonly': True}, 'operation': {'readonly': True}, 'description': {'readonly': True}, }", "'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs)", "'thumbprint': {'key': 'thumbprint', 'type': 'str'}, 'is_verified': {'key': 'isVerified', 'type': 'bool'},", "Azure. :param name: The name of the route. The name", "will be ignored when sending a request. :param value: The", "'str'}, } def __init__( self, **kwargs ): super(OperationListResult, self).__init__(**kwargs) self.value", ":type secondary_key: str :param rights: Required. The permissions assigned to", "lock duration. See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601:", "bool \"\"\" _validation = { 'source': {'required': True}, 'endpoint_names': {'required':", "hub applies to messages to be delivered to built-in and", "self.partition_count = kwargs.get('partition_count', None) self.partition_ids = None self.path = None", ":type name: str :param subscription_id: The subscription identifier of the", "when sending a request. :param value: The list of shared", "service bus topic endpoint. :type connection_string: str :param name: Required.", "order to send to Azure. :param key: Required. The key", "100 routing rules are allowed for paid hubs and a", "class ErrorDetails(msrest.serialization.Model): \"\"\"Error details. 
Variables are only populated by the", "start_time_utc: ~datetime.datetime :ivar end_time_utc: The time the job stopped processing.", "disabled_device_count: The count of disabled devices in the identity registry.", "'end': {'key': 'end', 'type': 'RouteErrorPosition'}, } def __init__( self, **kwargs", "'str'}, } def __init__( self, **kwargs ): super(JobResponseListResult, self).__init__(**kwargs) self.value", "a request. :param retention_time_in_days: The retention time for device-to-cloud messages", "'connection_string': {'key': 'connectionString', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'},", "request. :param value: The array of quota metrics objects. :type", "is enabled. :type is_enabled: bool \"\"\" _validation = { 'source':", "str :ivar is_verified: Determines whether certificate has been verified. :vartype", "\"\"\"Use to provide failover region when requesting manual Failover for", "IoT hub. Possible values include: \"Free\", \"Standard\", \"Basic\". :vartype tier:", "array of User subscription quota response. Variables are only populated", "values include: \"None\", \"DeviceManagement\". :type features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar", "kwargs.get('comments', None) self.features = kwargs.get('features', None) self.locations = None class", ":type location: str :param tags: A set of tags. The", "'name_available': {'key': 'nameAvailable', 'type': 'bool'}, 'reason': {'key': 'reason', 'type': 'str'},", "routes the messages to, based on the routing rules. :type", "applied to the message. :type endpoint_names: list[str] \"\"\" _validation =", "self.desired = kwargs.get('desired', None) self.reported = kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model):", "System properties. 
:type system_properties: dict[str, str] \"\"\" _attribute_map = {", "{'key': 'endTimeUtc', 'type': 'rfc-1123'}, 'type': {'key': 'type', 'type': 'str'}, 'status':", "properties when it routes messages to the fallback endpoint. All", "'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState',", "class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating whether a given IoT hub", "be the same as the actual queue name. :type name:", "'str'}, 'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit': {'key': 'limit', 'type':", ":ivar disabled_device_count: The count of disabled devices in the identity", "'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type':", "self.id = None self.name = None self.type = None self.location", "{ 'id': {'readonly': True}, 'name': {'readonly': True}, 'etag': {'readonly': True},", "'minimum': {'key': 'minimum', 'type': 'long'}, 'maximum': {'key': 'maximum', 'type': 'long'},", "The messaging endpoint properties for the file upload notification queue.", "property is not set, the messages which do not meet", "1 custom endpoint is allowed across all endpoint types for", "= kwargs.get('value', None) self.next_link = None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that", "the enrichment is applied to the message. :type endpoint_names: list[str]", "\"Basic\". :vartype tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The number", "'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit': {'key': 'limit', 'type': 'int'},", "in the response body, it must also be provided as", "~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The number of provisioned IoT Hub units.", "date and time. 
:vartype expiry: ~datetime.datetime :ivar thumbprint: The certificate's", "twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation = { 'route': {'required': True}, }", "is available. :vartype name_available: bool :ivar reason: The reason for", "'name': {'key': 'name', 'type': 'str'}, 'source': {'key': 'source', 'type': 'str'},", "Severity of the route error. Possible values include: \"error\", \"warning\".", ":type result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed result of", "is not provided it will evaluate to true by default.", "None class Resource(msrest.serialization.Model): \"\"\"The common properties of an Azure resource.", ":type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If True, file upload", "storage account. :type container_name: str :param file_name_format: File name format", "request. :ivar name_available: The value which indicates whether the provided", "'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, 'max_value': {'key': 'maxValue', 'type':", "certificate: str \"\"\" _attribute_map = { 'certificate': {'key': 'certificate', 'type':", "and 'JSON'. Default value is 'avro'. Possible values include: \"Avro\",", "str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The detailed reason message. :type", "{'key': 'message', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, }", "total_device_count: long :ivar enabled_device_count: The count of enabled devices in", "array of JobResponse objects with a next link. 
Variables are", "~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map = { 'properties': {'key': 'properties', 'type': 'RouteProperties'},", "{ 'routing_source': {'key': 'routingSource', 'type': 'str'}, 'message': {'key': 'message', 'type':", "'str'}, 'value': {'key': 'value', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type':", "~datetime.timedelta :param ttl_as_iso8601: The period of time for which a", "{ 'compilation_errors': {'key': 'compilationErrors', 'type': '[RouteCompilationError]'}, } def __init__( self,", "with a next link. Variables are only populated by the", "populated in order to send to Azure. :ivar resource_type: The", "'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly': True}, }", "allowed across all endpoint types for paid hubs and only", "int \"\"\" _validation = { 'max_delivery_count': {'maximum': 100, 'minimum': 1},", "maximum: long :ivar default: The default number of units. :vartype", "'type': 'long'}, 'default': {'key': 'default', 'type': 'long'}, 'scale_type': {'key': 'scaleType',", "= kwargs.get('localized_value', None) class Operation(msrest.serialization.Model): \"\"\"IoT Hub REST API operation.", "'expiry': {'readonly': True}, 'thumbprint': {'readonly': True}, 'is_verified': {'readonly': True}, 'created':", "The properties related to the custom endpoints to which your", "'thumbprint': {'readonly': True}, 'is_verified': {'readonly': True}, 'created': {'readonly': True}, 'updated':", "} def __init__( self, **kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name =", "Routing message. 
:type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing Twin Reference.", "): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject = None self.expiry = None self.thumbprint", "super(IotHubLocationDescription, self).__init__(**kwargs) self.location = kwargs.get('location', None) self.role = kwargs.get('role', None)", "times the IoT hub attempts to deliver a message on", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.localized_value = kwargs.get('localized_value', None) class", "IoT hub. :param lock_duration_as_iso8601: The lock duration. See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload.", "'type': 'RoutingTwinProperties'}, } def __init__( self, **kwargs ): super(RoutingTwin, self).__init__(**kwargs)", "message is available to consume before it is expired by", "condition: str :param endpoint_names: Required. The list of endpoints to", "severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location where the route", "{'key': 'reason', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, }", "self.locations = None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties. Variables are", ":vartype disabled_device_count: long \"\"\" _validation = { 'total_device_count': {'readonly': True},", "and will be ignored when sending a request. :param retention_time_in_days:", "that is evaluated to apply the routing rule. If no", "to-device-messages. :type default_ttl_as_iso8601: ~datetime.timedelta :param feedback: The properties of the", "group of the event hub endpoint. :type resource_group: str \"\"\"", ":param properties: IotHub properties. 
:type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required.", "True}, 'host_name': {'readonly': True}, 'locations': {'readonly': True}, } _attribute_map =", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult,", "cloud-to-device messaging properties. :param max_delivery_count: The max delivery count for", "routes: The list of user-provided routing rules that the IoT", "job stopped processing. :vartype end_time_utc: ~datetime.datetime :ivar type: The type", "= None self.resource = None self.operation = None self.description =", "health data. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link to more", "IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the SKU of the IoT hub. Variables", "a request. :ivar job_id: The job identifier. :vartype job_id: str", "str :param rights: Required. The permissions assigned to the shared", "quota metric. :vartype max_value: long \"\"\" _validation = { 'name':", "None self.message = kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model): \"\"\"The properties of", "{ 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True},", "_attribute_map = { 'routing_source': {'key': 'routingSource', 'type': 'str'}, 'message': {'key':", "list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link to more results. :vartype next_link: str", "{'key': 'scaleType', 'type': 'str'}, } def __init__( self, **kwargs ):", "array of IotHubSkuDescription. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The next", "Azure region. :type location: str :param role: The role of", "than one storage account causes an error to be thrown.", "): super(IotHubCapacity, self).__init__(**kwargs) self.minimum = None self.maximum = None self.default", "of an X509 CA Certificate. 
Variables are only populated by", "happened. :type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map = { 'message': {'key':", "__init__( self, **kwargs ): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues', None)", "base-64 representation of X509 certificate .cer file or just .pem", "The container need not exist but should be creatable using", "of IoT Hub operations supported by the Microsoft.Devices resource provider.", "self.resource = None self.operation = None self.description = None class", "fallback route. If the condition is not provided it will", "_validation = { 'connection_string': {'required': True}, 'name': {'required': True, 'pattern':", "kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of testing a route.", "upload notifications are enabled. :type enable_file_upload_notifications: bool :param cloud_to_device: The", "contains the IP address range in CIDR notation for the", "'object'}, 'properties': {'key': 'properties', 'type': 'RoutingTwinProperties'}, } def __init__( self,", "{'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group':", "name. :vartype path: str :ivar endpoint: The Event Hub-compatible endpoint.", "of the provisioned Event Hub-compatible endpoint used by the IoT", "type: str :ivar etag: The etag. :vartype etag: str \"\"\"", "'job_id': {'readonly': True}, 'start_time_utc': {'readonly': True}, 'end_time_utc': {'readonly': True}, 'type':", "'type': 'Name'}, } def __init__( self, **kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs)", "endpoint used by the IoT hub. Variables are only populated", "= { 'name': {'key': 'name', 'type': 'str'}, 'tier': {'key': 'tier',", "endpoint properties for the file upload notification queue. 
:type messaging_endpoints:", "kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model): \"\"\"Name of Iot Hub type. :param", "'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'},", "{'readonly': True}, 'location': {'required': True}, 'sku': {'required': True}, } _attribute_map", ":ivar next_link: :vartype next_link: str \"\"\" _validation = { 'next_link':", "'routing': {'key': 'routing', 'type': 'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'},", "'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'},", "self.is_verified = None self.created = None self.updated = None self.certificate", "be ignored when sending a request. :ivar name_available: The value", "the reason for the failure. :vartype failure_reason: str :ivar status_message:", "system_properties: System properties. :type system_properties: dict[str, str] \"\"\" _attribute_map =", "stopped processing. :vartype end_time_utc: ~datetime.datetime :ivar type: The type of", ":type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map = { 'properties': {'key': 'properties',", ":ivar verification_code: The certificate's verification code that will be used", ":type connection_string: str :param container_name: Required. The name of the", "= None self.details = None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of", ":param source: Required. The source that the routing rule is", "self.body = kwargs.get('body', None) self.app_properties = kwargs.get('app_properties', None) self.system_properties =", "'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'rfc-1123'}, 'type': {'key': 'type', 'type':", "self.properties = kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired: Twin", "a request. 
:param value: List of consumer groups objects. :type", "'int'}, } def __init__( self, **kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601", ":type etag: str :param properties: IotHub properties. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period of", "a storage container endpoint. All required parameters must be populated", "key or name for the enrichment property. :type key: str", "update date and time. :vartype updated: ~datetime.datetime :ivar verification_code: The", "be unique across endpoint types. :type name: str :param subscription_id:", "\"running\", \"completed\", \"failed\", \"cancelled\". :vartype status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar", "'http_status_code': {'key': 'httpStatusCode', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'},", "'sku': {'required': True}, } _attribute_map = { 'id': {'key': 'id',", "partition_ids: The partition ids in the Event Hub-compatible endpoint. :vartype", "to which the messages that satisfy the condition are routed", "'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'},", "{'required': True}, } _attribute_map = { 'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type':", "kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing all routes. :param", "Required. Used to specify whether a route is enabled. 
:type", "{'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs ):", "None self.capacity = kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter", "RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect,", "to the fallback endpoint. All required parameters must be populated", "{'key': 'location', 'type': 'RouteErrorRange'}, } def __init__( self, **kwargs ):", "'type': {'readonly': True}, 'status': {'readonly': True}, 'failure_reason': {'readonly': True}, 'status_message':", "the route error happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End", ":param lock_duration_as_iso8601: The lock duration. See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. :type lock_duration_as_iso8601:", ":param name: Required. The name of the route. The name", "of the feedback queue for cloud-to-device messages. :param lock_duration_as_iso8601: The", "'bool'}, 'created': {'key': 'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type':", "RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus queue endpoint types.", "'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__(", "'result': {'key': 'result', 'type': 'str'}, 'details': {'key': 'details', 'type': 'TestRouteResultDetails'},", "values include: \"unknown\", \"enqueued\", \"running\", \"completed\", \"failed\", \"cancelled\". :vartype status:", "device-to-cloud messages in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days: long :param", "'type': 'CertificateProperties'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name',", "messages as expected. 
The 'unhealthy' status shows that the endpoint", "None) self.feedback = kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The health data", "'type': 'str'}, } def __init__( self, **kwargs ): super(FailoverInput, self).__init__(**kwargs)", "super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name = None self.current_value = None self.max_value =", "{ 'key': {'key': 'key', 'type': 'str'}, 'value': {'key': 'value', 'type':", "the host. :vartype host_name: str :param event_hub_endpoints: The Event Hub-compatible", "'type': 'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints',", "to. For example, DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\",", "kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties", "None) self.service_bus_topics = kwargs.get('service_bus_topics', None) self.event_hubs = kwargs.get('event_hubs', None) self.storage_containers", "'endpoint_names': {'required': True, 'max_items': 1, 'min_items': 1}, 'is_enabled': {'required': True},", "the MIT License. See License.txt in the project root for", "properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map = { 'tags': {'key': 'tags', 'type':", "= kwargs['sku'] self.capacity = kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array", "route error happened. :type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map = {", "The source that the routing rule is to be applied", "service bus topic endpoint. :type resource_group: str \"\"\" _validation =", ":type body: str :param app_properties: App properties. 
:type app_properties: dict[str,", "'[MatchedRoute]'}, } def __init__( self, **kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes", "value: The array of IotHubSkuDescription. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link:", "= kwargs.get('unit', None) self.current_value = kwargs.get('current_value', None) self.limit = kwargs.get('limit',", "{ 'connection_string': {'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name':", "The status of the job. Possible values include: \"unknown\", \"enqueued\",", "{'key': 'compilationErrors', 'type': '[RouteCompilationError]'}, } def __init__( self, **kwargs ):", "{'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location':", "'comments', 'type': 'str'}, 'features': {'key': 'features', 'type': 'str'}, 'locations': {'key':", "be populated in order to send to Azure. :param input_blob_container_uri:", "True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map = {", "actual topic name. :type name: str :param subscription_id: The subscription", ":param event_hubs: The list of Event Hubs endpoints that IoT", "{'readonly': True}, } _attribute_map = { 'minimum': {'key': 'minimum', 'type':", ":vartype max_value: long \"\"\" _validation = { 'name': {'readonly': True},", "evaluates to true by default. For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type", "dict[str, str] :ivar id: The Event Hub-compatible consumer group identifier.", "default_ttl_as_iso8601: ~datetime.timedelta :param feedback: The properties of the feedback queue", ":vartype resource_type: str :param sku: Required. The type of the", "the event hub endpoint. 
:type connection_string: str :param name: Required.", "endpoint: str \"\"\" _validation = { 'partition_ids': {'readonly': True}, 'path':", "= kwargs['value'] self.endpoint_names = kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error details. Variables", "self, **kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class", "self.column = kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route errors.", "None) self.features = kwargs.get('features', None) self.locations = None class IotHubQuotaMetricInfo(msrest.serialization.Model):", "**kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.source =", "when sending a request. :ivar code: The error code. :vartype", "= None self.type = None self.location = kwargs['location'] self.tags =", "container where you upload files. The container need not exist", "the route error happened. :param line: Line where the route", "self.value = kwargs.get('value', None) self.next_link = None class EventHubProperties(msrest.serialization.Model): \"\"\"The", "to this dictionary is events. This key has to be", "def __init__( self, **kwargs ): super(RouteCompilationError, self).__init__(**kwargs) self.message = kwargs.get('message',", "{'key': 'location', 'type': 'str'}, 'role': {'key': 'role', 'type': 'str'}, }", ":param routing: The routing related properties of the IoT hub.", "_attribute_map = { 'key': {'key': 'key', 'type': 'str'}, 'value': {'key':", "the storage account. :type container_name: str :param file_name_format: File name", "start time of the job. :vartype start_time_utc: ~datetime.datetime :ivar end_time_utc:", "None) self.health_status = kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array", "duration. 
See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The", "self.value = kwargs.get('value', None) self.next_link = None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes", "'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments': {'key': 'comments', 'type': 'str'}, 'features': {'key':", "{ 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'route': {'key': 'route', 'type':", "None self.name = None self.type = None self.location = kwargs['location']", "this IoT hub. :param lock_duration_as_iso8601: The lock duration. See: https://docs.microsoft.com/azure/iot-", "'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def __init__(", "of IotHubSkuDescription objects with a next link. Variables are only", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type default_ttl_as_iso8601: ~datetime.timedelta :param feedback: The properties", "**kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 =", "'str'}, 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'twin': {'key': 'twin', 'type':", "1 endpoint is allowed. :type endpoint_names: list[str] :param is_enabled: Required.", "property is set to True, causes an error to be", "be ignored when sending a request. :param value: List of", "Iot Hub type. :param value: IotHub type. :type value: str", "values include: \"F1\", \"S1\", \"S2\", \"S3\", \"B1\", \"B2\", \"B3\". :type", "list[str] :param is_enabled: Required. Used to specify whether a route", "messages to the fallback endpoint. All required parameters must be", "name: The name of the quota metric. :vartype name: str", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(EndpointHealthDataListResult,", "key as $default. 
Specifying more than one storage account causes", "next_link: str \"\"\" _validation = { 'value': {'readonly': True}, 'next_link':", "name format for the blob. Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All", "the fallback endpoint. All required parameters must be populated in", "certificate's subject name. :vartype subject: str :ivar expiry: The certificate's", "'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, } def __init__( self,", "the blob. Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory", "_validation = { 'code': {'readonly': True}, 'http_status_code': {'readonly': True}, 'message':", "a request. :param value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype", "capacity: long \"\"\" _validation = { 'name': {'required': True}, 'tier':", "count of enabled devices in the identity registry. :vartype enabled_device_count:", "reported: Twin desired properties. :type reported: object \"\"\" _attribute_map =", "Certificate. Variables are only populated by the server, and will", "sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required. IotHub capacity. :type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity", "RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related to the custom endpoints to which", "the scaling enabled. Possible values include: \"Automatic\", \"Manual\", \"None\". :vartype", "'source': {'key': 'source', 'type': 'str'}, 'condition': {'key': 'condition', 'type': 'str'},", "None self.http_status_code = None self.message = None self.details = None", "value: str :param localized_value: Localized value of name. :type localized_value:", "{'key': 'comments', 'type': 'str'}, 'features': {'key': 'features', 'type': 'str'}, 'locations':", "of the request to list IoT Hub operations. 
It contains", "} def __init__( self, **kwargs ): super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value =", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[IotHubDescription]'},", "'type': 'str'}, 'is_verified': {'key': 'isVerified', 'type': 'bool'}, 'created': {'key': 'created',", "for this property when the enableFileUploadNotifications property is set to", "of storage container in the storage account. :type container_name: str", "delivery count for cloud-to-device messages in the device queue. See:", "link. :vartype next_link: str \"\"\" _validation = { 'next_link': {'readonly':", "\"Reject\". :type action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required. A", "CIDR notation for the rule. :type ip_mask: str \"\"\" _validation", "} def __init__( self, **kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes =", "the IoT hub can failover to. Possible values include: \"primary\",", "= { 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type',", "'type': 'str'}, } def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs)", "resource provider. :vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL to get", "reference input parameter. This is an optional parameter. :param tags:", "'RoutingTwin'}, } def __init__( self, **kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source", "self.total_device_count = None self.enabled_device_count = None self.disabled_device_count = None class", "**kwargs ): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count = None self.enabled_device_count = None", "endpoint. :param endpoint_id: Id of the endpoint. 
:type endpoint_id: str", "source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition which is", "True}, 'disabled_device_count': {'readonly': True}, } _attribute_map = { 'total_device_count': {'key':", "account to which files are uploaded. :type connection_string: str :param", "grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names: Required. The", "unit: str :param current_value: Current number of IotHub type. :type", "be thrown. Not specifying a value for this property when", ":param rights: Required. The permissions assigned to the shared access", "dict[str, str] :param system_properties: System properties. :type system_properties: dict[str, str]", ":param cloud_to_device: The IoT hub cloud-to-device messaging properties. :type cloud_to_device:", "= { 'compilation_errors': {'key': 'compilationErrors', 'type': '[RouteCompilationError]'}, } def __init__(", "an enrichment that your IoT hub applies to messages delivered", "OperationDisplay(msrest.serialization.Model): \"\"\"The object that represents the operation. Variables are only", "custom endpoint is allowed across all endpoint types for free", "'tier': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name',", "custom endpoints to which your IoT hub routes messages based", "queue endpoint types. All required parameters must be populated in", "\"\"\"The list of shared access policies with a next link.", "10 custom endpoints are allowed across all endpoint types for", "custom endpoints. 
A maximum of 100 routing rules are allowed", "allowed for paid hubs and a maximum of 5 routing", "\"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\",", "self.value = None self.next_link = None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry", "end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map = { 'start': {'key': 'start', 'type':", "region. :type location: str :param role: The role of the", "super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "'type': 'str'}, } def __init__( self, **kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs)", "endpoint. :type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The list of user-provided", "an event hub endpoint. All required parameters must be populated", "'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs)", "'tier', 'type': 'str'}, 'capacity': {'key': 'capacity', 'type': 'long'}, } def", "The capabilities and features enabled for the IoT hub. Possible", "that is used to serialize messages to blobs. Supported values", "identifier. :vartype id: str :ivar name: The resource name. :vartype", "resource_group: The name of the resource group of the event", "endpoint_names: list[str] :param is_enabled: Required. 
Used to specify whether the", "{'readonly': True}, 'disabled_device_count': {'readonly': True}, } _attribute_map = { 'total_device_count':", "locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation = { 'provisioning_state': {'readonly': True}, 'state':", "= None self.etag = None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array", "evaluated in order to apply the fallback route. If the", "The type of the scaling enabled. Possible values include: \"Automatic\",", "str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required. A string that contains", "feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation = { 'max_delivery_count': {'maximum': 100, 'minimum':", ":param endpoint_names: Required. The list of endpoints to which messages", "types for free hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The", ":param details: Detailed result of testing route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails", "100, 'minimum': 1}, } _attribute_map = { 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601',", "tag. :vartype etag: str :ivar type: The resource type. :vartype", "etag. :vartype etag: str \"\"\" _validation = { 'id': {'readonly':", "set, the messages which do not meet any of the", "response. 
Variables are only populated by the server, and will", "'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'}, } def", ":type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map = { 'result': {'key': 'result',", "'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type':", "kwargs.get('properties', None) self.sku = kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array", "Hubs endpoint. :type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The list of", "receiving device-to-cloud messages in the Event Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-", "for the enrichment property. :type value: str :param endpoint_names: Required.", "Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type source:", "and that MUST have its key as $default. Specifying more", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period of time", "= kwargs.get('end', None) class RouteProperties(msrest.serialization.Model): \"\"\"The properties of a routing", "\"unknown\", \"enqueued\", \"running\", \"completed\", \"failed\", \"cancelled\". :vartype status: str or", "'type': '[str]'}, 'path': {'key': 'path', 'type': 'str'}, 'endpoint': {'key': 'endpoint',", "= { 'value': {'key': 'value', 'type': '[Operation]'}, 'next_link': {'key': 'nextLink',", "attempts to deliver a message on the feedback queue. See:", "The count of enabled devices in the identity registry. :vartype", "Twin desired properties. 
:type reported: object \"\"\" _attribute_map = {", "'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, 'rights': {'key': 'rights',", "a route. :param compilation_errors: JSON-serialized list of route compilation errors.", "'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } def", "type: str :param location: Required. The resource location. :type location:", "{'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "= None class OperationInputs(msrest.serialization.Model): \"\"\"Input values. All required parameters must", "Required. The value indicating whether keys should be excluded during", "def __init__( self, **kwargs ): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies',", "tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The number of provisioned", "Hub has not established a connection with the endpoint. No", "\"\"\" _validation = { 'value': {'readonly': True}, 'next_link': {'readonly': True},", "'name', 'type': 'Name'}, } def __init__( self, **kwargs ): super(UserSubscriptionQuota,", "str :param action: Required. The desired action for requests captured", "'type': 'str'}, } def __init__( self, **kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs)", "Hub-compatible consumer group identifier. :vartype id: str :ivar name: The", "__init__( self, **kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name = None self.current_value", "None) class Name(msrest.serialization.Model): \"\"\"Name of Iot Hub type. 
:param value:", "__init__( self, **kwargs ): super(TestRouteInput, self).__init__(**kwargs) self.message = kwargs.get('message', None)", "{'key': 'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'state':", "{'key': 'value', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, }", "_attribute_map = { 'value': {'key': 'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key':", "Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation = { 'route': {'required':", "None) class IotHubDescription(Resource): \"\"\"The description of the IoT hub. Variables", "dict[str, str] \"\"\" _attribute_map = { 'body': {'key': 'body', 'type':", "subscription_id: The subscription identifier of the service bus queue endpoint.", "\"None\", \"DeviceManagement\". :type features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary", "endpoint types. The name need not be the same as", "value: The list of shared access policies. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule]", "{'required': True}, 'batch_frequency_in_seconds': {'maximum': 720, 'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum': 524288000,", "} _attribute_map = { 'retention_time_in_days': {'key': 'retentionTimeInDays', 'type': 'long'}, 'partition_count':", "'capacity': {'key': 'capacity', 'type': 'IotHubCapacity'}, } def __init__( self, **kwargs", "'enrichments': {'key': 'enrichments', 'type': '[EnrichmentProperties]'}, } def __init__( self, **kwargs", "{'key': 'excludeKeys', 'type': 'bool'}, } def __init__( self, **kwargs ):", "to service bus topic endpoint types. 
All required parameters must", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class IotHubLocationDescription(msrest.serialization.Model):", "hub uses to route messages to built-in and custom endpoints.", "True}, 'exclude_keys': {'required': True}, } _attribute_map = { 'export_blob_container_uri': {'key':", "X509 leaf certificate .cer file or just .pem file content.", "The billing tier for the IoT hub. Possible values include:", ":type tags: dict[str, str] \"\"\" _attribute_map = { 'tags': {'key':", "be ignored when sending a request. :param properties: The tags.", "provided in the response body, it must also be provided", "name: The resource name. :vartype name: str :ivar type: The", "'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'},", "'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, } def __init__(", "'str'}, 'type': {'key': 'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type':", "'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments': {'key':", "*not* required. If it is provided in the response body,", "the messaging endpoints used by this IoT hub. :param lock_duration_as_iso8601:", "'type': '[EndpointHealthData]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "self.container_name = kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format', None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds',", "group of the service bus queue endpoint. 
:type resource_group: str", "{'key': 'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "{'key': 'thumbprint', 'type': 'str'}, 'is_verified': {'key': 'isVerified', 'type': 'bool'}, 'created':", "True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True}, 'sku':", "kwargs.get('tags', None) class IotHubDescription(Resource): \"\"\"The description of the IoT hub.", "'type': 'bool'}, } def __init__( self, **kwargs ): super(RouteProperties, self).__init__(**kwargs)", "self.encoding = kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input parameter.", "routes. :param routing_source: Routing source. Possible values include: \"Invalid\", \"DeviceMessages\",", "'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue',", "SAS URI generated by IoT Hub for file upload is", "and will be lost if the code is regenerated. #", "'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubCapacity, self).__init__(**kwargs)", ":type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing Twin Reference. :type twin:", "location: str :param tags: A set of tags. The resource", "} def __init__( self, **kwargs ): super(FailoverInput, self).__init__(**kwargs) self.failover_region =", "= { 'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount',", "send to Azure. :param failover_region: Required. Region the hub will", "'type': 'str'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, 'capacity': {'key': 'capacity',", "The name of the resource group of the event hub", "routing rules. :type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The list of", "for which the enrichment is applied to the message. 
:type", "and a maximum of 5 routing rules are allowed for", "None self.certificate = kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of", "or just .pem file content. :type certificate: str \"\"\" _attribute_map", "} def __init__( self, **kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors =", ":ivar status_message: The status message for the job. :vartype status_message:", "~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The list of user-provided routing rules that", "based on the routing rules. A maximum of 10 custom", "format for the blob. Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters", "'message': {'key': 'message', 'type': 'str'}, } def __init__( self, **kwargs", "of the locations where a resource is provisioned. :param location:", "kwargs.get('storage_endpoints', None) self.messaging_endpoints = kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None)", "message. :type endpoint_names: list[str] \"\"\" _validation = { 'key': {'required':", "None class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of JobResponse objects with", "resource tags. :type tags: dict[str, str] \"\"\" _validation = {", "'failureReason', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'parent_job_id': {'key':", "type. :type current_value: int :param limit: Numerical limit on IotHub", "True, causes an error to be thrown. :type storage_endpoints: dict[str,", "} def __init__( self, **kwargs ): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies =", "def __init__( self, **kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired = kwargs.get('desired',", "to, such as DeviceMessages. 
Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\",", "data to this endpoint. The status of an unhealthy endpoint", "the actual queue name. :type name: str :param subscription_id: The", "long \"\"\" _validation = { 'name': {'readonly': True}, 'current_value': {'readonly':", "be populated in order to send to Azure. :ivar resource_type:", "order to send to Azure. :param connection_string: Required. The connection", "values include: \"Automatic\", \"Manual\", \"None\". :vartype scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType", "'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True}, 'endpoint_names': {'required':", "kwargs.get('value', None) self.next_link = None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of", ":vartype end_time_utc: ~datetime.datetime :ivar type: The type of the job.", "retention_time_in_days: The retention time for device-to-cloud messages in days. See:", "The total count of devices in the identity registry. :vartype", "= kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of testing a", "**kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs) self.name = kwargs['name'] self.tier = None", "'routes': {'key': 'routes', 'type': '[MatchedRoute]'}, } def __init__( self, **kwargs", "self, **kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class", "{'key': 'ipMask', 'type': 'str'}, } def __init__( self, **kwargs ):", "the job stopped processing. :vartype end_time_utc: ~datetime.datetime :ivar type: The", "self, **kwargs ): super(CertificateListDescription, self).__init__(**kwargs) self.value = kwargs.get('value', None) class", "be ignored when sending a request. 
:ivar code: The error", "'type': 'rfc-1123'}, 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, 'is_verified': {'key': 'isVerified',", "Hub-compatible endpoint. :vartype endpoint: str \"\"\" _validation = { 'partition_ids':", "and will be ignored when sending a request. :ivar code:", "and time. :vartype created: ~datetime.datetime :ivar updated: The certificate's last", "{'required': True}, 'rights': {'required': True}, } _attribute_map = { 'key_name':", "'str'}, 'health_status': {'key': 'healthStatus', 'type': 'str'}, } def __init__( self,", "{'key': 'start', 'type': 'RouteErrorPosition'}, 'end': {'key': 'end', 'type': 'RouteErrorPosition'}, }", "= None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when evaluating route. :param", "str :ivar certificate: The certificate content. :vartype certificate: str \"\"\"", "self, **kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource =", "ignored when sending a request. :ivar value: List of IoT", "\"\"\"The properties of a routing rule that your IoT hub", "kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the messaging endpoints", "# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All", "'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, 'rights': {'key': 'rights', 'type': 'str'},", "be thrown. :type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The messaging", "{ 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type':", "route. If the condition is not provided it will evaluate", "type. 
:type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map = { 'id': {'key':", "'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation',", "= kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The health data for an", "): super(IotHubDescription, self).__init__(**kwargs) self.etag = kwargs.get('etag', None) self.properties = kwargs.get('properties',", "of tags. Twin Tags. :type tags: object :param properties: :type", "can upload files. Currently you can configure only one Azure", "Service Bus topic endpoints that the IoT hub routes the", "Properties of routes that matched. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map", "IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The", ":ivar id: The resource identifier. :vartype id: str :ivar name:", "only one endpoint is allowed. :type endpoint_names: list[str] :param is_enabled:", "for device-to-cloud messages in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. :type retention_time_in_days: long", "self.is_enabled = kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related to the", "an eventually consistent state of health. The 'dead' status shows", "Hub uses these properties when it routes messages to the", "messaging endpoints used by this IoT hub. :param lock_duration_as_iso8601: The", "route compilation errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map = {", "regenerated. 
# -------------------------------------------------------------------------- from azure.core.exceptions import HttpResponseError import msrest.serialization class", "'type': 'str'}, } def __init__( self, **kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs)", "self.code = None self.http_status_code = None self.message = None self.details", "{'key': 'resourceGroup', 'type': 'str'}, } def __init__( self, **kwargs ):", ":ivar resource_type: The type of the resource. :vartype resource_type: str", "where the route error happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end:", ":param name: IotHub type. :type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map =", "'[str]'}, 'path': {'key': 'path', 'type': 'str'}, 'endpoint': {'key': 'endpoint', 'type':", "_attribute_map = { 'start': {'key': 'start', 'type': 'RouteErrorPosition'}, 'end': {'key':", "MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the messaging endpoints used by this", "route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin", "'line': {'key': 'line', 'type': 'int'}, 'column': {'key': 'column', 'type': 'int'},", "health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map = { 'endpoint_id': {'key':", "\"\"\" _validation = { 'next_link': {'readonly': True}, } _attribute_map =", "IotHub type. :type value: str :param localized_value: Localized value of", "list of Service Bus topic endpoints that the IoT hub", ":type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT hub comments. :type comments:", "code: str :ivar http_status_code: The HTTP status code. 
:vartype http_status_code:", "} def __init__( self, **kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri =", "\"\"\" _validation = { 'max_delivery_count': {'maximum': 100, 'minimum': 1}, }", "identify errors and monitor issues with endpoints. The 'unknown' status", "RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when evaluating route. :param message: Route error", "**kwargs ): super(IotHubCapacity, self).__init__(**kwargs) self.minimum = None self.maximum = None", "self.type = kwargs.get('type', None) self.unit = kwargs.get('unit', None) self.current_value =", "property. :type key: str :param value: Required. The value for", "fall-back route when none of the conditions specified in the", "kwargs['key_name'] self.primary_key = kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key', None) self.rights", "the quota metric. :vartype max_value: long \"\"\" _validation = {", "blob container URI. :type export_blob_container_uri: str :param exclude_keys: Required. The", "populated by the server, and will be ignored when sending", "desired properties. :type desired: object :param reported: Twin desired properties.", "'value': {'key': 'value', 'type': '[EndpointHealthData]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages. 
:type retention_time_in_days: long :param partition_count: The number", ":type name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The billing tier", "'retention_time_in_days': {'key': 'retentionTimeInDays', 'type': 'long'}, 'partition_count': {'key': 'partitionCount', 'type': 'int'},", "_attribute_map = { 'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys': {'key':", "Possible values include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead,", "when sending a request. :param properties: The description of an", "'operation': {'readonly': True}, 'description': {'readonly': True}, } _attribute_map = {", "reason for unavailability. Possible values include: \"Invalid\", \"AlreadyExists\". :vartype reason:", "hub attempts to deliver a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count:", "'status': {'key': 'status', 'type': 'str'}, 'failure_reason': {'key': 'failureReason', 'type': 'str'},", "None) self.cloud_to_device = kwargs.get('cloud_to_device', None) self.comments = kwargs.get('comments', None) self.features", "The certificate's verification code that will be used for proof", "health. The 'dead' status shows that the endpoint is not", "__init__( self, **kwargs ): super(TagsResource, self).__init__(**kwargs) self.tags = kwargs.get('tags', None)", "'sku', 'type': 'IotHubSkuInfo'}, } def __init__( self, **kwargs ): super(IotHubDescription,", "IoT hub routes messages to, based on the routing rules.", "subscription identifier of the storage account. :type subscription_id: str :param", "all endpoint types for paid hubs and only 1 custom", "self, **kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "the resource. :vartype resource_type: str :param sku: Required. 
The type", "messaging endpoint properties for the file upload notification queue. :type", "Name of the operation. :vartype operation: str :ivar description: Description", "include: \"error\", \"warning\". :type severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location:", "Endpoint names must be unique across endpoint types. :type name:", "topic name. :type name: str :param subscription_id: The subscription identifier", "the Proof-Of-Possession flow. Variables are only populated by the server,", "generated by IoT Hub for file upload is valid. See:", "name: Required. The name of the route. The name can", "'int'}, } def __init__( self, **kwargs ): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601", "'encoding', 'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingStorageContainerProperties,", "= { 'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__(", "last update date and time. :vartype updated: ~datetime.datetime :ivar verification_code:", "'start', 'type': 'RouteErrorPosition'}, 'end': {'key': 'end', 'type': 'RouteErrorPosition'}, } def", "'capacity', 'type': 'IotHubCapacity'}, } def __init__( self, **kwargs ): super(IotHubSkuDescription,", "indicates whether the provided name is available. :vartype name_available: bool", "when sending a request. All required parameters must be populated", "of the operation. :vartype operation: str :ivar description: Description of", "be creatable using the connectionString specified. :type container_name: str \"\"\"", "one Azure Storage account and that MUST have its key", "it must also be provided as a header per the", "the resource group of the service bus topic endpoint. :type", "dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If True, file upload notifications are", "\"completed\", \"failed\", \"cancelled\". 
:vartype status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason:", "{'key': 'code', 'type': 'str'}, 'http_status_code': {'key': 'httpStatusCode', 'type': 'str'}, 'message':", "the fallback route is enabled. :type is_enabled: bool \"\"\" _validation", "identity registry. :vartype enabled_device_count: long :ivar disabled_device_count: The count of", "the enableFileUploadNotifications property is set to True, causes an error", "'action': {'key': 'action', 'type': 'str'}, 'ip_mask': {'key': 'ipMask', 'type': 'str'},", "self.name = None self.display = kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model): \"\"\"The", "The resource tags. :type tags: dict[str, str] :param etag: The", "message: str \"\"\" _validation = { 'name_available': {'readonly': True}, 'reason':", "the Microsoft.Devices resource provider. :vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL", ":type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation = { 'id': {'readonly': True},", "True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'},", "hub state. :vartype state: str :ivar host_name: The name of", "} def __init__( self, **kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired =", "entity tag. :vartype etag: str :ivar type: The resource type.", "~datetime.datetime :ivar updated: The certificate's last update date and time.", "'type': 'str'}, 'host_name': {'key': 'hostName', 'type': 'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints',", "} _attribute_map = { 'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count':", "FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of the fallback route. IoT Hub uses", "class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity information. 
Variables are only populated", "status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If status == failed,", "str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary and secondary location for", "where the IoT hub is currently provisioned. The secondary region", "'long'}, 'partition_count': {'key': 'partitionCount', 'type': 'int'}, 'partition_ids': {'key': 'partitionIds', 'type':", "None) self.message = kwargs.get('message', None) self.twin = kwargs.get('twin', None) class", "properties related to service bus topic endpoint types. All required", "free hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The list of", "'value', 'type': '[Operation]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", ":ivar subject: The certificate's subject name. :vartype subject: str :ivar", "kwargs.get('line', None) self.column = kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of", "~datetime.datetime :ivar type: The type of the job. Possible values", "processing. :vartype end_time_utc: ~datetime.datetime :ivar type: The type of the", "'storage_containers': {'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, } def __init__( self, **kwargs", "self.sku = kwargs['sku'] self.capacity = kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized", "the same as the actual topic name. :type name: str", "self.storage_containers = kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related to", "~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\"", "{ 'value': {'key': 'value', 'type': '[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink', 'type':", "name of the shared access policy. 
:type key_name: str :param", "when sending a request. :param value: The array of JobResponse", "'RoutingMessage'}, 'route': {'key': 'route', 'type': 'RouteProperties'}, 'twin': {'key': 'twin', 'type':", "Variables are only populated by the server, and will be", "'value': {'key': 'value', 'type': '[JobResponse]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", ":type endpoint_names: list[str] :param is_enabled: Required. Used to specify whether", "the storage account. :type resource_group: str :param container_name: Required. The", "None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing all routes. :param routes:", "across endpoint types. :type name: str :param subscription_id: The subscription", "'container_name': {'key': 'containerName', 'type': 'str'}, } def __init__( self, **kwargs", "is currently provisioned. The secondary region is the Azure disaster", "'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'},", "of 5 routing rules are allowed for free hubs. :type", "the endpoint is accepting messages as expected. The 'unhealthy' status", "represents the operation. Variables are only populated by the server,", "the routing rules. :type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The list", "where the route error happened. :type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map", "= None self.operation = None self.description = None class OperationInputs(msrest.serialization.Model):", "'keyName', 'type': 'str'}, 'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key':", "endpoint is allowed. :type endpoint_names: list[str] :param is_enabled: Required. 
Used", "None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of an enrichment that your", "'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs',", "The Event Hub-compatible endpoint. :vartype endpoint: str \"\"\" _validation =", "= None self.scale_type = None class Resource(msrest.serialization.Model): \"\"\"The common properties", "= None self.path = None self.endpoint = None class ExportDevicesRequest(msrest.serialization.Model):", "str :param subscription_id: The subscription identifier of the event hub", "'ip_mask': {'key': 'ipMask', 'type': 'str'}, } def __init__( self, **kwargs", "= None self.name = None self.etag = None self.type =", "'health_status': {'key': 'healthStatus', 'type': 'str'}, } def __init__( self, **kwargs", "self.limit = kwargs.get('limit', None) self.name = kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model):", "__init__( self, **kwargs ): super(MatchedRoute, self).__init__(**kwargs) self.properties = kwargs.get('properties', None)", "bus topic endpoint. :type subscription_id: str :param resource_group: The name", "the IoT hub. Possible values include: \"Free\", \"Standard\", \"Basic\". :vartype", "'CertificatePropertiesWithNonce'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type':", "True}, 'start_time_utc': {'readonly': True}, 'end_time_utc': {'readonly': True}, 'type': {'readonly': True},", "= { 'value': {'key': 'value', 'type': '[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink',", "\"Free\", \"Standard\", \"Basic\". :vartype tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity:", "values include: \"unknown\", \"export\", \"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\",", "to send to Azure. 
:param name: The name of the", "IotHub capacity. :type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation = { 'resource_type':", ":type key_name: str :param primary_key: The primary key. :type primary_key:", "True}, 'is_verified': {'readonly': True}, 'created': {'readonly': True}, 'updated': {'readonly': True},", "list[str] \"\"\" _validation = { 'key': {'required': True}, 'value': {'required':", "The subscription identifier of the service bus topic endpoint. :type", "fallback route is enabled. :type is_enabled: bool \"\"\" _validation =", ":vartype type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The status of", "'type': 'str'}, 'source': {'key': 'source', 'type': 'str'}, 'condition': {'key': 'condition',", "= None self.status_message = None self.parent_job_id = None class JobResponseListResult(msrest.serialization.Model):", "tier: The billing tier for the IoT hub. Possible values", ":vartype etag: str \"\"\" _validation = { 'id': {'readonly': True},", "'parent_job_id': {'key': 'parentJobId', 'type': 'str'}, } def __init__( self, **kwargs", "\"\"\"The JSON-serialized X509 Certificate. :param certificate: base-64 representation of the", "): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", ":type primary_key: str :param secondary_key: The secondary key. :type secondary_key:", ":ivar thumbprint: The certificate's thumbprint. :vartype thumbprint: str :ivar is_verified:", "request. :ivar name: Operation name: {provider}/{resource}/{read | write | action", "= kwargs.get('partition_count', None) self.partition_ids = None self.path = None self.endpoint", "consumer group names with a next link. Variables are only", ":param id: IotHub type id. 
:type id: str :param type:", "None self.type = None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub cloud-to-device", "= None class JobResponseListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of JobResponse objects", "When this property is not set, the messages which do", "super(TagsResource, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for", "'certificate', 'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateProperties,", "populated in order to send to Azure. :param connection_string: Required.", "endpoint. The status of an unhealthy endpoint will be updated", "kwargs.get('etag', None) self.properties = kwargs.get('properties', None) self.sku = kwargs['sku'] class", "'type': '[JobResponse]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "self.file_name_format = kwargs.get('file_name_format', None) self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes =", "{'readonly': True}, 'verification_code': {'readonly': True}, 'certificate': {'readonly': True}, } _attribute_map", "None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting an", "__init__( self, **kwargs ): super(RoutingMessage, self).__init__(**kwargs) self.body = kwargs.get('body', None)", "{'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'}, }", "properties of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. 
:param endpoints: The", "'[CertificateDescription]'}, } def __init__( self, **kwargs ): super(CertificateListDescription, self).__init__(**kwargs) self.value", "'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type': 'int'}, 'encoding': {'key': 'encoding', 'type': 'str'},", "kwargs.get('event_hubs', None) self.storage_containers = kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties", "str \"\"\" _attribute_map = { 'value': {'key': 'value', 'type': 'str'},", "'jobId', 'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc': {'key':", "{ 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string': {'key': 'connectionString', 'type':", "accepting messages, after IoT Hub retried sending messages for the", "to service bus queue endpoint types. All required parameters must", "request. :param retention_time_in_days: The retention time for device-to-cloud messages in", "current_value: long :ivar max_value: The maximum value of the quota", "} _attribute_map = { 'key': {'key': 'key', 'type': 'str'}, 'value':", "in order to send to Azure. :param name: Required. The", "keys should be excluded during export. :type exclude_keys: bool \"\"\"", ":type default_ttl_as_iso8601: ~datetime.timedelta :param feedback: The properties of the feedback", "'type': 'str'}, 'failure_reason': {'key': 'failureReason', 'type': 'str'}, 'status_message': {'key': 'statusMessage',", "source to which the routing rule is to be applied", "value is 300 seconds. :type batch_frequency_in_seconds: int :param max_chunk_size_in_bytes: Maximum", "'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, } def __init__( self,", "available to consume before it is expired by the IoT", "a next link. 
Variables are only populated by the server,", "'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'},", "calls for the IoT hub. :type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param", "self.value = kwargs.get('value', None) self.next_link = None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The", "__init__( self, **kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "Required. The input blob container URI. :type input_blob_container_uri: str :param", "True}, 'default': {'readonly': True}, 'scale_type': {'readonly': True}, } _attribute_map =", "periods, underscores, hyphens and has a maximum length of 64", "= None self.sku = kwargs['sku'] self.capacity = kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model):", "The secondary region is the Azure disaster recovery (DR) paired", "queue for cloud-to-device messages. :param lock_duration_as_iso8601: The lock duration for", "super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id', None) self.health_status = kwargs.get('health_status', None)", "True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items':", ":type file_name_format: str :param batch_frequency_in_seconds: Time interval at which blobs", "property. :type value: str :param endpoint_names: Required. The list of", "\"\"\"Compilation error when evaluating route. :param message: Route error message.", "that the IoT Hub has not established a connection with", "to Azure. :param name: The name of the route. 
The", "'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class EventHubProperties(msrest.serialization.Model):", "notifications are enabled. :type enable_file_upload_notifications: bool :param cloud_to_device: The IoT", "content. :type certificate: str \"\"\" _validation = { 'subject': {'readonly':", "endpoints for which the enrichment is applied to the message.", "'{str}'}, } def __init__( self, **kwargs ): super(Resource, self).__init__(**kwargs) self.id", "max_delivery_count: The max delivery count for cloud-to-device messages in the", "} def __init__( self, **kwargs ): super(RoutingProperties, self).__init__(**kwargs) self.endpoints =", "true by default. For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str", "IotHub type id. :type id: str :param type: Response type.", "request. :ivar value: List of IoT Hub operations supported by", "'is_enabled': {'required': True}, } _attribute_map = { 'name': {'key': 'name',", "this property is not set, the messages which do not", "'type': 'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback': {'key': 'feedback',", "'failoverRegion', 'type': 'str'}, } def __init__( self, **kwargs ): super(FailoverInput,", "value is 'avro'. Possible values include: \"Avro\", \"AvroDeflate\", \"JSON\". :type", "'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def", "1, 'min_items': 1}, 'is_enabled': {'required': True}, } _attribute_map = {", "when sending a request. :param value: List of consumer groups", "the request to list IoT Hub operations. 
It contains a", ":vartype state: str :ivar host_name: The name of the host.", "None class EventHubProperties(msrest.serialization.Model): \"\"\"The properties of the provisioned Event Hub-compatible", "'verification_code': {'key': 'verificationCode', 'type': 'str'}, 'certificate': {'key': 'certificate', 'type': 'str'},", "str :ivar endpoint: The Event Hub-compatible endpoint. :vartype endpoint: str", "URI. :type output_blob_container_uri: str \"\"\" _validation = { 'input_blob_container_uri': {'required':", "{ 'connection_string': {'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, }", "for license information. # Code generated by Microsoft (R) AutoRest", "rule is to be applied to. For example, DeviceMessages. Possible", "and must be unique. :type name: str :param source: Required.", "'state', 'type': 'str'}, 'host_name': {'key': 'hostName', 'type': 'str'}, 'event_hub_endpoints': {'key':", "there are any. :vartype next_link: str \"\"\" _validation = {", "_attribute_map = { 'value': {'key': 'value', 'type': '[IotHubDescription]'}, 'next_link': {'key':", "the resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required. IotHub capacity.", "} def __init__( self, **kwargs ): super(RouteErrorPosition, self).__init__(**kwargs) self.line =", "The name of the Azure region. :type location: str :param", "'statusMessage', 'type': 'str'}, 'parent_job_id': {'key': 'parentJobId', 'type': 'str'}, } def", "hub uses to route messages to endpoints. All required parameters", "None self.thumbprint = None self.is_verified = None self.created = None", "resource group of the service bus topic endpoint. :type resource_group:", "by this IoT hub. :param lock_duration_as_iso8601: The lock duration. 
See:", "'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, } def __init__(", "self.reason = None self.message = kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model): \"\"\"The", "} _attribute_map = { 'value': {'key': 'value', 'type': '[IotHubSkuDescription]'}, 'next_link':", "str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The billing tier for the", "# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved.", "status_message: The status message for the job. :vartype status_message: str", "'int'}, 'partition_ids': {'key': 'partitionIds', 'type': '[str]'}, 'path': {'key': 'path', 'type':", "count for cloud-to-device messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-", "or rejected from this endpoint. Possible values include: \"unknown\", \"healthy\",", "__init__( self, **kwargs ): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count = None self.enabled_device_count", "for unavailability. Possible values include: \"Invalid\", \"AlreadyExists\". :vartype reason: str", "max_delivery_count: int \"\"\" _validation = { 'max_delivery_count': {'maximum': 100, 'minimum':", "failure_reason: str :ivar status_message: The status message for the job.", "super(Resource, self).__init__(**kwargs) self.id = None self.name = None self.type =", "= kwargs.get('secondary_key', None) self.rights = kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list", "{'key': 'outputBlobContainerUri', 'type': 'str'}, } def __init__( self, **kwargs ):", ":ivar http_status_code: The HTTP status code. 
:vartype http_status_code: str :ivar", ":type is_enabled: bool \"\"\" _validation = { 'name': {'required': True,", "super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None) self.connection_string = kwargs['connection_string'] self.container_name", "str :param endpoint_names: Required. The list of endpoints to which", "self).__init__(**kwargs) self.properties = kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of", "routing rule. If no condition is provided, it evaluates to", "an IoT Hub instance. :param tags: A set of tags.", "name: str \"\"\" _validation = { 'name': {'required': True}, }", "route messages to built-in and custom endpoints. A maximum of", "Required. The connection string for the Azure Storage account to", "'resourceGroup', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, 'file_name_format': {'key':", ":param value: List of consumer groups objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo]", "dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The messaging endpoint properties for the", "and time. :vartype expiry: ~datetime.datetime :ivar thumbprint: The certificate's thumbprint.", ":ivar details: The error details. :vartype details: str \"\"\" _validation", "'verification_code': {'readonly': True}, 'certificate': {'readonly': True}, } _attribute_map = {", "sku: Required. The type of the resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo", "values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type source: str", "} _attribute_map = { 'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri':", "str :param location: Required. The resource location. 
:type location: str", "{'key': 'message', 'type': 'str'}, 'severity': {'key': 'severity', 'type': 'str'}, 'location':", "\"\"\" _validation = { 'job_id': {'readonly': True}, 'start_time_utc': {'readonly': True},", "'result', 'type': 'str'}, 'details': {'key': 'details', 'type': 'TestRouteResultDetails'}, } def", "~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map = { 'id': {'key': 'id', 'type': 'str'},", "): super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link = None class", "Required. The connection string of the event hub endpoint. :type", "ignored when sending a request. :ivar total_device_count: The total count", "'[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers', 'type':", "hub routes messages to, based on the routing rules. This", "self.max_delivery_count = kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None) self.feedback =", "a request. :ivar value: List of IoT Hub operations supported", "class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics. Variables are only populated by", "value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype next_link: str \"\"\"", "None self.end_time_utc = None self.type = None self.status = None", "self.certificate = kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of an", "populated in order to send to Azure. :param message: Routing", "= kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None)", "'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key':", "the root container where you upload files. The container need", "an IoT hub. 
Variables are only populated by the server,", "= { 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601',", "provide parameters when requesting an export of all devices in", "kwargs.get('end', None) class RouteProperties(msrest.serialization.Model): \"\"\"The properties of a routing rule", "IotHubDescription(Resource): \"\"\"The description of the IoT hub. Variables are only", "not provided it will evaluate to true by default. For", "sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string: Required. The connection string for the", "{'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag':", "Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The resource identifier. :vartype", "'type': 'rfc-1123'}, 'type': {'key': 'type', 'type': 'str'}, 'status': {'key': 'status',", "have been delivered to or rejected from this endpoint. Possible", "based on the routing rules. :type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics:", "Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation = {", "'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'certificate': {'key': 'certificate', 'type':", "self.is_enabled = kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of the feedback", "'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True}, 'batch_frequency_in_seconds': {'maximum':", "self).__init__(**kwargs) self.subject = None self.expiry = None self.thumbprint = None", ":type event_hub_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The routing related properties", "long :ivar maximum: The maximum number of units. 
:vartype maximum:", "'[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs', 'type':", "that the endpoint is accepting messages as expected. The 'unhealthy'", "__init__( self, **kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None)", "'bool'}, 'reason': {'key': 'reason', 'type': 'str'}, 'message': {'key': 'message', 'type':", "'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'parent_job_id': {'key': 'parentJobId',", "None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota response. :param id: IotHub", "'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'},", "from azure.core.exceptions import HttpResponseError import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized", "the routing rule. If no condition is provided, it evaluates", "str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation = { 'connection_string': {'required': True},", "to messages to be delivered to built-in and custom endpoints.", "the IoT hub uses to route messages to built-in and", "self.type = None self.status = None self.failure_reason = None self.status_message", "{ 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type':", "in the Event Hub-compatible endpoint. :vartype partition_ids: list[str] :ivar path:", "of units. :vartype minimum: long :ivar maximum: The maximum number", "whether the fallback route is enabled. 
:type is_enabled: bool \"\"\"", "): super(RouteProperties, self).__init__(**kwargs) self.name = kwargs['name'] self.source = kwargs['source'] self.condition", "None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus topic", "'bool'}, } def __init__( self, **kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs) self.name", ":type resource_group: str :param container_name: Required. The name of storage", "str :ivar expiry: The certificate's expiration date and time. :vartype", ":param value: The array of IotHubSkuDescription. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar", "name: IotHub type. :type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map = {", "__init__( self, **kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None)", "which files are uploaded. :type connection_string: str :param container_name: Required.", "can be reordered. :type file_name_format: str :param batch_frequency_in_seconds: Time interval", "properties. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required. IotHub SKU info.", "list of endpoints to which messages that satisfy the condition", "= None self.created = None self.updated = None self.certificate =", "to built-in and custom endpoints. A maximum of 100 routing", "specify whether a route is enabled. :type is_enabled: bool \"\"\"", ":type container_name: str :param file_name_format: File name format for the", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[IotHubSkuDescription]'},", "messages for the retrial period. 
See IoT Hub metrics to", "{'readonly': True}, 'current_value': {'readonly': True}, 'max_value': {'readonly': True}, } _attribute_map", "'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback': {'key':", "'type', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key':", "ignored when sending a request. :ivar subject: The certificate's subject", "self.state = None self.host_name = None self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None)", "= kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model): \"\"\"The description of an X509", "'type': 'str'}, 'location': {'key': 'location', 'type': 'RouteErrorRange'}, } def __init__(", "consumer groups objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The next", "bool :ivar reason: The reason for unavailability. Possible values include:", "): super(RoutingProperties, self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints', None) self.routes = kwargs.get('routes',", "'long'}, } def __init__( self, **kwargs ): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count", "\"\"\"Input for testing route. All required parameters must be populated", "name: {provider}/{resource}/{read | write | action | delete}. :vartype name:", "def __init__( self, **kwargs ): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "def __init__( self, **kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type = None", "consumer group identifier. :vartype id: str :ivar name: The Event", "event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The list of storage container endpoints", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The next link. 
:vartype next_link:", "devices in the hub. All required parameters must be populated", "the operation. :vartype operation: str :ivar description: Description of the", "to, based on the routing rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\"", "'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'},", "'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'container_name': {'key':", "'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key':", "'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key':", "set of results. Variables are only populated by the server,", "'type': 'bool'}, 'created': {'key': 'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated',", "it is expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages.", "IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of one of the locations where a", "self.rights = kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of shared access", "sending a request. :param retention_time_in_days: The retention time for device-to-cloud", "Required. The export blob container URI. :type export_blob_container_uri: str :param", "uses these properties when it routes messages to the fallback", "'message', 'type': 'RoutingMessage'}, 'route': {'key': 'route', 'type': 'RouteProperties'}, 'twin': {'key':", "cloud-to-device messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. 
:type", "'[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "is available to consume before it is expired by the", "URI generated by IoT Hub for file upload is valid.", "of the event hub endpoint. :type resource_group: str \"\"\" _validation", "= None self.type = None self.status = None self.failure_reason =", "of enabled devices in the identity registry. :vartype enabled_device_count: long", "} def __init__( self, **kwargs ): super(MatchedRoute, self).__init__(**kwargs) self.properties =", "data for an endpoint. :param endpoint_id: Id of the endpoint.", "__init__( self, **kwargs ): super(Name, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "True}, } _attribute_map = { 'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'},", ":type connection_string: str :param name: Required. The name that identifies", "is set to True, causes an error to be thrown.", "self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) class", "resource_group: str \"\"\" _validation = { 'connection_string': {'required': True}, 'name':", ":ivar tier: The billing tier for the IoT hub. Possible", "of EndpointHealthData objects with a next link. Variables are only", "of times the IoT hub attempts to deliver a message.", "The name of the resource group of the storage account.", "name: str :param source: Required. The source to which the", "'hostName', 'type': 'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing': {'key':", "~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End where the route error happened. :type", "= { 'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys': {'key': 'excludeKeys',", "request. :param value: The array of JobResponse objects. :type value:", "Azure. :param message: Routing message. 
:type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route:", "~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The routing related properties of the IoT", "): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id", "} _attribute_map = { 'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys':", "array of quota metrics objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link:", "AutoRest Code Generator. # Changes may cause incorrect behavior and", "Hub is retrying to send data to this endpoint. The", "messages to, based on the routing rules. This list does", "def __init__( self, **kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri']", "super(RouteErrorRange, self).__init__(**kwargs) self.start = kwargs.get('start', None) self.end = kwargs.get('end', None)", "= { 'input_blob_container_uri': {'required': True}, 'output_blob_container_uri': {'required': True}, } _attribute_map", ":type tags: dict[str, str] :param etag: The Etag field is", "after IoT Hub retried sending messages for the retrial period.", "{'key': 'result', 'type': 'str'}, 'details': {'key': 'details', 'type': 'TestRouteResultDetails'}, }", "a route is enabled. :type is_enabled: bool \"\"\" _validation =", "next link. Variables are only populated by the server, and", "'is_verified': {'key': 'isVerified', 'type': 'bool'}, 'created': {'key': 'created', 'type': 'rfc-1123'},", "'str'}, 'container_name': {'key': 'containerName', 'type': 'str'}, } def __init__( self,", "IoT hub. 
:type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The IP filter", "self, **kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name = None self.current_value =", "primary or secondary. The primary region is where the IoT", "} def __init__( self, **kwargs ): super(RouteProperties, self).__init__(**kwargs) self.name =", "consistent state of health. The 'dead' status shows that the", "'str'}, 'properties': {'key': 'properties', 'type': 'IotHubProperties'}, 'sku': {'key': 'sku', 'type':", "{'key': 'properties', 'type': 'IotHubProperties'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, }", "self.resource_group = kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related to", "blobs are written to storage. Value should be between 60", "def __init__( self, **kwargs ): super(IotHubDescription, self).__init__(**kwargs) self.etag = kwargs.get('etag',", "id: str :param type: Response type. :type type: str :param", "properties related to an event hub endpoint. All required parameters", "{'key': 'nameAvailable', 'type': 'bool'}, 'reason': {'key': 'reason', 'type': 'str'}, 'message':", "'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count': {'key':", "message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin", "The number of provisioned IoT Hub units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type", "= None self.name = None self.type = None self.location =", "No messages have been delivered to or rejected from this", "shows that the endpoint is accepting messages as expected. The", "The name of the IP filter rule. :type filter_name: str", "maximum value of the quota metric. 
:vartype max_value: long \"\"\"", "__init__( self, **kwargs ): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None)", "response. :param id: IotHub type id. :type id: str :param", "'name': {'key': 'name', 'type': 'Name'}, } def __init__( self, **kwargs", "hub endpoint. All required parameters must be populated in order", "= kwargs['name'] self.source = kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names", "Azure. :param name: Required. The name of the IoT hub", "fallback_route: The properties of the route that is used as", ":param name: Required. The name of the SKU. Possible values", "{ 'properties': {'key': 'properties', 'type': '{str}'}, 'id': {'key': 'id', 'type':", "Required. The resource location. :type location: str :param tags: A", "are reserved: events, fileNotifications, $default. Endpoint names must be unique", "The period of time for which the SAS URI generated", "desired properties. :type reported: object \"\"\" _attribute_map = { 'desired':", "None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route errors. :param start: Start", "proof of possession. :vartype verification_code: str :ivar certificate: The certificate", "times the IoT hub attempts to deliver a message. See:", "provisioning_state: The provisioning state. :vartype provisioning_state: str :ivar state: The", "endpoint. :type resource_group: str \"\"\" _validation = { 'connection_string': {'required':", "= kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key', None) self.rights = kwargs['rights']", "request. :param properties: The description of an X509 CA Certificate.", "the service bus queue endpoint. :type subscription_id: str :param resource_group:", "= None self.certificate = kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description", "in order to send to Azure. 
:param connection_string: Required. The", "messaging#device-to-cloud-messages. :type partition_count: int :ivar partition_ids: The partition ids in", "'type': 'int'}, } def __init__( self, **kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs)", "): super(RouteCompilationError, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.severity = kwargs.get('severity',", "source that the routing rule is to be applied to,", "None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables are only populated by", "features enabled for the IoT hub. Possible values include: \"None\",", "'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type': 'str'}, } def", "The start time of the job. :vartype start_time_utc: ~datetime.datetime :ivar", "self.exclude_keys = kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use to provide failover region", "'[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "'str'}, } def __init__( self, **kwargs ): super(IotHubCapacity, self).__init__(**kwargs) self.minimum", "and time. :vartype updated: ~datetime.datetime :ivar verification_code: The certificate's verification", "Event Hubs endpoint. 
:type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The list", "self.value = kwargs.get('value', None) self.next_link = None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The", "'value': {'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available = None self.reason = None self.message =", "routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map = { 'routes': {'key': 'routes', 'type':", "or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required. A string that contains the", "name can only include alphanumeric characters, periods, underscores, hyphens and", "tags. :type tags: dict[str, str] :param etag: The Etag field", "class TestAllRoutesInput(msrest.serialization.Model): \"\"\"Input for testing all routes. :param routing_source: Routing", "None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide", "_attribute_map = { 'value': {'key': 'value', 'type': '[EndpointHealthData]'}, 'next_link': {'key':", "Changes may cause incorrect behavior and will be lost if", "'str'}, } def __init__( self, **kwargs ): super(ErrorDetails, self).__init__(**kwargs) self.code", "{ 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type':", "'line', 'type': 'int'}, 'column': {'key': 'column', 'type': 'int'}, } def", "default: The default number of units. :vartype default: long :ivar", ":vartype name: str :ivar type: the resource type. :vartype type:", "**kwargs ): super(TestRouteInput, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.route =", "'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type':", "enrichment property. 
:type value: str :param endpoint_names: Required. The list", "): super(OperationInputs, self).__init__(**kwargs) self.name = kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result of", ":param ttl_as_iso8601: The period of time for which a message", "the job. Possible values include: \"unknown\", \"export\", \"import\", \"backup\", \"readDeviceProperties\",", "certificate. :vartype name: str :ivar etag: The entity tag. :vartype", "endpoint types for free hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes:", "ignored when sending a request. :param value: JSON-serialized array of", "based on the routing rules. :type service_bus_topics: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs:", "'FeedbackProperties'}, } def __init__( self, **kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count", "to Azure. :param name: Required. The name of the SKU.", "'name': {'readonly': True}, 'current_value': {'readonly': True}, 'max_value': {'readonly': True}, }", "'certificate', 'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateBodyDescription,", "The certificate's thumbprint. :vartype thumbprint: str :ivar is_verified: Determines whether", "happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End where the route", "Possible values include: \"error\", \"warning\". :type severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity", "'startTimeUtc', 'type': 'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'rfc-1123'}, 'type': {'key':", "kwargs.get('tags', None) self.properties = kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param", "HttpResponseError import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509 Certificate. 
:param", ":param certificate: The certificate content. :type certificate: str \"\"\" _validation", "possession. :vartype verification_code: str :ivar certificate: The certificate content. :vartype", "where you upload files. The container need not exist but", "'type': 'int'}, 'limit': {'key': 'limit', 'type': 'int'}, 'name': {'key': 'name',", "provided it will evaluate to true by default. For grammar,", "class TagsResource(msrest.serialization.Model): \"\"\"A container holding only the Tags for a", "__init__( self, **kwargs ): super(CertificateDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None)", "} def __init__( self, **kwargs ): super(CertificateListDescription, self).__init__(**kwargs) self.value =", "} def __init__( self, **kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string =", "one storage account causes an error to be thrown. Not", "the quota metric. :vartype current_value: long :ivar max_value: The maximum", "system_properties: dict[str, str] \"\"\" _attribute_map = { 'body': {'key': 'body',", "__init__( self, **kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs) self.name = kwargs['name'] self.tier", "path: str :ivar endpoint: The Event Hub-compatible endpoint. :vartype endpoint:", "metric. :vartype current_value: long :ivar max_value: The maximum value of", "and a URL link to get the next set of", "updated to healthy when IoT Hub has established an eventually", "dict[str, str] \"\"\" _attribute_map = { 'tags': {'key': 'tags', 'type':", "None self.default = None self.scale_type = None class Resource(msrest.serialization.Model): \"\"\"The", "'int'}, 'encoding': {'key': 'encoding', 'type': 'str'}, } def __init__( self,", "kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The properties of the Job Response object.", "unit: Unit of IotHub type. 
:type unit: str :param current_value:", "'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties': {'key':", "'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'container_name': {'key': 'containerName',", ":param messaging_endpoints: The messaging endpoint properties for the file upload", "{ 'route': {'required': True}, } _attribute_map = { 'message': {'key':", "Required. The list of endpoints for which the enrichment is", "_validation = { 'minimum': {'readonly': True, 'maximum': 1, 'minimum': 1},", "send to Azure. :param connection_string: Required. The connection string of", "self.source = kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names']", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number", "class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of an enrichment that your IoT", "All parameters are mandatory but can be reordered. :type file_name_format:", "JSON-serialized array of EndpointHealthData objects with a next link. Variables", "None self.name = None self.type = None self.etag = None", "'minimum': 1}, } _attribute_map = { 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type':", "'value': {'key': 'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "'minimum', 'type': 'long'}, 'maximum': {'key': 'maximum', 'type': 'long'}, 'default': {'key':", "'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'}, 'capacity': {'key': 'capacity',", "\"\"\"Detailed result of testing a route. 
:param compilation_errors: JSON-serialized list", "kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use to provide failover region when requesting", "self.name = kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result of the request to", "routing related properties of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param", "_attribute_map = { 'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key':", "'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly': True}, } _attribute_map = {", "ip_filter_rules: The IP filter rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state:", "'value': {'key': 'value', 'type': '[IotHubDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "input parameter. This is an optional parameter. :param tags: A", "'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True}, } _attribute_map", "name: The name of the route. The name can only", "**kwargs ): super(JobResponse, self).__init__(**kwargs) self.job_id = None self.start_time_utc = None", "send to Azure. :param export_blob_container_uri: Required. The export blob container", "kwargs.get('secondary_key', None) self.rights = kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of", "{'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'properties':", "'type': 'long'}, 'maximum': {'key': 'maximum', 'type': 'long'}, 'default': {'key': 'default',", "bool \"\"\" _validation = { 'export_blob_container_uri': {'required': True}, 'exclude_keys': {'required':", "will be ignored when sending a request. :ivar subject: The", "allowed. :type endpoint_names: list[str] :param is_enabled: Required. 
Used to specify", "class OperationDisplay(msrest.serialization.Model): \"\"\"The object that represents the operation. Variables are", "__init__( self, **kwargs ): super(RouteErrorPosition, self).__init__(**kwargs) self.line = kwargs.get('line', None)", "authorization_policies: The shared access policies you can use to secure", "kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None) self.feedback = kwargs.get('feedback', None)", "or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param", "and will be ignored when sending a request. :ivar name:", "the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601: ~datetime.timedelta :param", "self.etag = None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of Event", "{'key': 'endpointNames', 'type': '[str]'}, } def __init__( self, **kwargs ):", "list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The next link. :vartype next_link: str \"\"\"", "Type: IotHubs. :vartype resource: str :ivar operation: Name of the", "subscription identifier of the service bus queue endpoint. :type subscription_id:", "only 1 custom endpoint is allowed across all endpoint types", "by the IoT hub. Variables are only populated by the", "current value for the quota metric. :vartype current_value: long :ivar", "None) self.current_value = kwargs.get('current_value', None) self.limit = kwargs.get('limit', None) self.name", "create date and time. :vartype created: ~datetime.datetime :ivar updated: The", "class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when evaluating route. :param message: Route", "for the rule. :type ip_mask: str \"\"\" _validation = {", "current_value: int :param limit: Numerical limit on IotHub type. 
:type", ":vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: The time the job stopped", "and has a maximum length of 64 characters. The following", "str :param severity: Severity of the route error. Possible values", "{ 'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules', 'type':", "} _attribute_map = { 'filter_name': {'key': 'filterName', 'type': 'str'}, 'action':", "creatable using the connectionString specified. :type container_name: str \"\"\" _validation", "list of route compilation errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map", "Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map = {", "\"\"\" _attribute_map = { 'message': {'key': 'message', 'type': 'str'}, 'severity':", "route. IoT Hub uses these properties when it routes messages", "related to service bus queue endpoint types. All required parameters", "= kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count',", "max delivery count for cloud-to-device messages in the device queue.", "reason: The reason for unavailability. Possible values include: \"Invalid\", \"AlreadyExists\".", "__init__( self, **kwargs ): super(CertificateProperties, self).__init__(**kwargs) self.subject = None self.expiry", "str :ivar current_value: The current value for the quota metric.", "healthy when IoT Hub has established an eventually consistent state", "access policies you can use to secure a connection to", "__init__( self, **kwargs ): super(JobResponseListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "self.subject = None self.expiry = None self.thumbprint = None self.is_verified", "be ignored when sending a request. 
All required parameters must", "r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map = { 'connection_string': {'key': 'connectionString', 'type': 'str'},", "_attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'source': {'key':", "{'key': 'value', 'type': '[IotHubDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "'long'}, 'max_value': {'key': 'maxValue', 'type': 'long'}, } def __init__( self,", "of 64 characters, and must be unique. :type name: str", "properties. :type system_properties: dict[str, str] \"\"\" _attribute_map = { 'body':", "The HTTP status code. :vartype http_status_code: str :ivar message: The", "id: str :ivar name: The name of the certificate. :vartype", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubSkuDescriptionListResult,", "): super(IpFilterRule, self).__init__(**kwargs) self.filter_name = kwargs['filter_name'] self.action = kwargs['action'] self.ip_mask", "file content. :type certificate: str \"\"\" _attribute_map = { 'certificate':", "Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required. Route properties.", "True}, 'parent_job_id': {'readonly': True}, } _attribute_map = { 'job_id': {'key':", "'message', 'type': 'str'}, 'details': {'key': 'details', 'type': 'str'}, } def", ":ivar id: The Event Hub-compatible consumer group identifier. :vartype id:", "kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None)", "and will be ignored when sending a request. :ivar job_id:", "= kwargs.get('value', None) self.next_link = None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation", "None) class CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only populated", "Azure. :param connection_string: Required. 
The connection string of the service", "when requesting an export of all devices in the IoT", "'ipMask', 'type': 'str'}, } def __init__( self, **kwargs ): super(IpFilterRule,", "str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation = { 'key_name': {'required': True},", "{'key': 'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IotHubProperties'}, 'sku':", "'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'},", "hub can failover to. Possible values include: \"primary\", \"secondary\". :type", "'type': 'RoutingTwin'}, } def __init__( self, **kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs)", "self).__init__(**kwargs) self.desired = kwargs.get('desired', None) self.reported = kwargs.get('reported', None) class", "'is_verified': {'readonly': True}, 'created': {'readonly': True}, 'updated': {'readonly': True}, 'verification_code':", "{'key': 'parentJobId', 'type': 'str'}, } def __init__( self, **kwargs ):", "kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to", "= kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of an X509", "_attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'tier': {'key':", "'properties': {'key': 'properties', 'type': '{str}'}, 'id': {'key': 'id', 'type': 'str'},", "EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of EndpointHealthData objects with a next", "is not accepting messages, after IoT Hub retried sending messages", "connection_string: Required. The connection string of the event hub endpoint.", "'minimum': 1}, 'maximum': {'readonly': True}, 'default': {'readonly': True}, 'scale_type': {'readonly':", "Microsoft Corporation. All rights reserved. 
# Licensed under the MIT", "__init__( self, **kwargs ): super(IotHubCapacity, self).__init__(**kwargs) self.minimum = None self.maximum", "SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of shared access policies with a next", "\"\"\"The properties related to the custom endpoints to which your", "self.description = None class OperationInputs(msrest.serialization.Model): \"\"\"Input values. All required parameters", "etag: The etag. :vartype etag: str \"\"\" _validation = {", "r'^[A-Za-z0-9-._]{1,64}$'}, 'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items': 1, 'min_items':", "'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, } def __init__( self, **kwargs ): super(RoutingEndpoints,", "Current number of IotHub type. :type current_value: int :param limit:", "order to send to Azure. :param failover_region: Required. Region the", "= None self.maximum = None self.default = None self.scale_type =", "'max_delivery_count': {'maximum': 100, 'minimum': 1}, } _attribute_map = { 'lock_duration_as_iso8601':", "list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map = { 'service_bus_queues': {'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'},", "the route error happened. :type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map =", "rules that the IoT hub uses to route messages to", "Bus topic endpoints that the IoT hub routes the messages", "self, **kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "by the Microsoft.Devices resource provider. 
:vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link:", "self.app_properties = kwargs.get('app_properties', None) self.system_properties = kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model):", "str :ivar name: The resource name. :vartype name: str :ivar", "export. :type exclude_keys: bool \"\"\" _validation = { 'export_blob_container_uri': {'required':", "self.resource_group = kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. :param body:", "A maximum of 100 routing rules are allowed for paid", "certificate. :param certificate: base-64 representation of X509 certificate .cer file", "'isVerified', 'type': 'bool'}, 'created': {'key': 'created', 'type': 'rfc-1123'}, 'updated': {'key':", "{'key': 'partitionCount', 'type': 'int'}, 'partition_ids': {'key': 'partitionIds', 'type': '[str]'}, 'path':", "'export_blob_container_uri': {'required': True}, 'exclude_keys': {'required': True}, } _attribute_map = {", "\"\"\" _validation = { 'key': {'required': True}, 'value': {'required': True},", "type: the resource type. :vartype type: str :ivar etag: The", "hub endpoint. :type subscription_id: str :param resource_group: The name of", "matched routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map = { 'routes':", "Bus queue endpoints that IoT hub routes the messages to,", "that will be used for proof of possession. :vartype verification_code:", "self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model):", "See License.txt in the project root for license information. 
#", "{'key': 'type', 'type': 'str'}, } def __init__( self, **kwargs ):", "{'readonly': True}, 'thumbprint': {'readonly': True}, 'is_verified': {'readonly': True}, 'created': {'readonly':", "import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509 Certificate. :param certificate:", "str] \"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly':", "None self.type = None class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "excluded during export. :type exclude_keys: bool \"\"\" _validation = {", "group of the service bus topic endpoint. :type resource_group: str", "'IotHubSkuInfo'}, } def __init__( self, **kwargs ): super(IotHubDescription, self).__init__(**kwargs) self.etag", "): super(IotHubSkuInfo, self).__init__(**kwargs) self.name = kwargs['name'] self.tier = None self.capacity", "{'readonly': True}, } _attribute_map = { 'properties': {'key': 'properties', 'type':", "max_delivery_count: int :param default_ttl_as_iso8601: The default time to live for", "Localized value of name. :type localized_value: str \"\"\" _attribute_map =", "__init__( self, **kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available = None self.reason", "service bus queue endpoint types. All required parameters must be", ":vartype description: str \"\"\" _validation = { 'provider': {'readonly': True},", "groups objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The next link.", "secondary key. :type secondary_key: str :param rights: Required. The permissions", "'service_bus_topics': {'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'},", "job. 
Possible values include: \"unknown\", \"export\", \"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\",", "): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id", "{'key': 'value', 'type': '[EndpointHealthData]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "IotHub type. :type current_value: int :param limit: Numerical limit on", "Possible values include: \"Free\", \"Standard\", \"Basic\". :vartype tier: str or", "endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count: int :ivar partition_ids: The", "_validation = { 'input_blob_container_uri': {'required': True}, 'output_blob_container_uri': {'required': True}, }", "reason message. :type message: str \"\"\" _validation = { 'name_available':", "the IoT hub routes the messages to, based on the", "None self.operation = None self.description = None class OperationInputs(msrest.serialization.Model): \"\"\"Input", "be ignored when sending a request. :param value: The list", ":vartype message: str :ivar details: The error details. :vartype details:", "it is provided in the response body, it must also", "example, DeviceMessages. 
Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\".", "paired region and also the region where the IoT hub", "or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The billing tier for the IoT", "int :param max_chunk_size_in_bytes: Maximum number of bytes for each blob", "} def __init__( self, **kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs) self.location =", "'str'}, } def __init__( self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string", "self.action = kwargs['action'] self.ip_mask = kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The properties", "# Changes may cause incorrect behavior and will be lost", "str :ivar etag: The etag. :vartype etag: str \"\"\" _validation", "enrichment is applied to the message. :type endpoint_names: list[str] \"\"\"", "number of bytes for each blob written to storage. Value", "'endTimeUtc', 'type': 'rfc-1123'}, 'type': {'key': 'type', 'type': 'str'}, 'status': {'key':", "certificate's last update date and time. :vartype updated: ~datetime.datetime :param", "Event Hub-compatible consumer group name. :vartype name: str :ivar type:", "array of IotHubQuotaMetricInfo objects with a next link. Variables are", "where the IoT hub can failover to. Possible values include:", "that is used as a fall-back route when none of", "self.routes = kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing route.", "description of an X509 CA Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar", "populated in order to send to Azure. :param key_name: Required.", "when IoT Hub has established an eventually consistent state of", "satisfy the condition are routed to. 
Currently only 1 endpoint", "'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'}, } def __init__( self, **kwargs", "kwargs.get('body', None) self.app_properties = kwargs.get('app_properties', None) self.system_properties = kwargs.get('system_properties', None)", "def __init__( self, **kwargs ): super(TagsResource, self).__init__(**kwargs) self.tags = kwargs.get('tags',", "value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL to get the next set", "} def __init__( self, **kwargs ): super(TestRouteResult, self).__init__(**kwargs) self.result =", "the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type ttl_as_iso8601: ~datetime.timedelta :param", "'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs)", "= kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. :param body: Body", "\"\"\" _attribute_map = { 'certificate': {'key': 'certificate', 'type': 'str'}, }", "the retrial period. See IoT Hub metrics to identify errors", "root for license information. # Code generated by Microsoft (R)", ":type system_properties: dict[str, str] \"\"\" _attribute_map = { 'body': {'key':", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubQuotaMetricInfoListResult,", ":param value: The list of shared access policies. :type value:", "Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map = { 'routing_source': {'key':", "'RoutingEndpoints'}, 'routes': {'key': 'routes', 'type': '[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute', 'type':", "ETag convention. :type etag: str :param properties: IotHub properties. :type", "service bus topic endpoint. :type subscription_id: str :param resource_group: The", "The connection string of the service bus topic endpoint. :type", "time. 
:vartype updated: ~datetime.datetime :ivar verification_code: The certificate's verification code", "kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing one route. :param", "{ 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601': {'key': 'defaultTtlAsIso8601', 'type':", "'maximum', 'type': 'long'}, 'default': {'key': 'default', 'type': 'long'}, 'scale_type': {'key':", "retention_time_in_days: long :param partition_count: The number of partitions for receiving", "to blobs. Supported values are 'avro', 'avrodeflate', and 'JSON'. Default", "\"\"\" _validation = { 'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly': True},", "the messages that satisfy the condition are routed to. Currently", "all routes. :param routes: JSON-serialized array of matched routes. :type", "{ 'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly': True},", "\"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". :vartype type: str or", "{ 'name_available': {'readonly': True}, 'reason': {'readonly': True}, } _attribute_map =", "str :param event_hub_endpoints: The Event Hub-compatible endpoint properties. The only", "'type': '[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "and also the region where the IoT hub can failover", "quota metric. :vartype current_value: long :ivar max_value: The maximum value", "{'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "the IoT hub is currently provisioned. The secondary region is", "be ignored when sending a request. :ivar provider: Service provider:", ":vartype resource: str :ivar operation: Name of the operation. 
:vartype", "name of the resource group of the service bus queue", ":type certificate: str \"\"\" _attribute_map = { 'certificate': {'key': 'certificate',", "is where the IoT hub is currently provisioned. The secondary", "happened. :type line: int :param column: Column where the route", "= { 'name': {'readonly': True}, } _attribute_map = { 'name':", "for the blob. Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are", "'tier': {'key': 'tier', 'type': 'str'}, 'capacity': {'key': 'capacity', 'type': 'long'},", "\"\"\" _validation = { 'name': {'required': True}, } _attribute_map =", "the shared access policy. Possible values include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\",", "Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count: int :ivar partition_ids:", "{'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, }", "super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model):", "of 10 custom endpoints are allowed across all endpoint types", "_attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, } def", "def __init__( self, **kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available = None", "= kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use", ":type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The IP filter rules. 
:type", "'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments', 'type': '[EnrichmentProperties]'}, } def __init__( self,", "'status_message': {'key': 'statusMessage', 'type': 'str'}, 'parent_job_id': {'key': 'parentJobId', 'type': 'str'},", "{'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, 'description':", "'route': {'required': True}, } _attribute_map = { 'message': {'key': 'message',", "quota metrics objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The next", "**kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None) self.connection_string =", "routes that matched. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map = {", "None) self.ip_filter_rules = kwargs.get('ip_filter_rules', None) self.provisioning_state = None self.state =", "sending messages for the retrial period. See IoT Hub metrics", "name: Required. The name of the SKU. Possible values include:", "True}, 'locations': {'readonly': True}, } _attribute_map = { 'authorization_policies': {'key':", "for paid hubs and a maximum of 5 routing rules", "that the endpoint is not accepting messages as expected and", "self.key_name = kwargs['key_name'] self.primary_key = kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key',", "messages to endpoints. All required parameters must be populated in", "'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag', 'type':", "include: \"unknown\", \"export\", \"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\",", "scaling enabled. Possible values include: \"Automatic\", \"Manual\", \"None\". 
:vartype scale_type:", "container_name: str \"\"\" _validation = { 'connection_string': {'required': True}, 'container_name':", "\"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message:", "See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period", "not meet any of the conditions specified in the 'routes'", "**kwargs ): super(Name, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.localized_value =", "an import of all devices in the hub. All required", "based on the routing rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map", "tags. Twin Tags. :type tags: object :param properties: :type properties:", "is_enabled: Required. Used to specify whether a route is enabled.", "The list of user-provided routing rules that the IoT hub", "user to update the tags on an IoT Hub instance.", "The Event Hub-compatible consumer group identifier. :vartype id: str :ivar", "or secondary. The primary region is where the IoT hub", "~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\"", "requests captured by this rule. Possible values include: \"Accept\", \"Reject\".", "allowed for free hubs. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The", "present in the dictionary while making create or update calls", "the same as the actual queue name. :type name: str", "is allowed. :type endpoint_names: list[str] :param is_enabled: Required. Used to", "value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The next link. 
:vartype next_link: str", "'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag':", "the resource group of the storage account. :type resource_group: str", "The etag. :vartype etag: str \"\"\" _validation = { 'id':", "list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link: The next link. :vartype next_link: str \"\"\"", "the server, and will be ignored when sending a request.", "None) self.role = kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating", "value: JSON-serialized array of Endpoint health data. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData]", "the route error happened. :type column: int \"\"\" _attribute_map =", "None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device = kwargs.get('cloud_to_device', None) self.comments", "type: Response type. :type type: str :param unit: Unit of", "self.name = None self.etag = None self.type = None class", "MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched. :param properties: Properties of routes that", "'dead' status shows that the endpoint is not accepting messages,", "of the resource group of the service bus queue endpoint.", "{'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag':", "identifier of the event hub endpoint. :type subscription_id: str :param", "feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count: int \"\"\" _validation", "to, based on the routing rules. This list does not", "kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing all routes. :param", "Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect", "= { 'value': {'key': 'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink',", "be provided as a header per the normal ETag convention.", "True}, 'capacity': {'required': True}, } _attribute_map = { 'resource_type': {'key':", "type: str \"\"\" _validation = { 'id': {'readonly': True}, 'name':", "'appProperties', 'type': '{str}'}, 'system_properties': {'key': 'systemProperties', 'type': '{str}'}, } def", "{'required': True}, 'endpoint_names': {'required': True, 'min_items': 1}, } _attribute_map =", "'str'}, } def __init__( self, **kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject", "of the storage account. :type resource_group: str :param container_name: Required.", "to live for cloud-to-device messages in the device queue. See:", "'type': 'str'}, 'rights': {'key': 'rights', 'type': 'str'}, } def __init__(", "= kwargs.get('resource_group', None) self.container_name = kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format', None)", "{'key': 'certificate', 'type': 'str'}, } def __init__( self, **kwargs ):", "blob. Default format is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory but", "'partition_ids': {'readonly': True}, 'path': {'readonly': True}, 'endpoint': {'readonly': True}, }", "time to live for cloud-to-device messages in the device queue.", "storage_containers: The list of storage container endpoints that IoT hub", "'host_name': {'readonly': True}, 'locations': {'readonly': True}, } _attribute_map = {", "'state': {'readonly': True}, 'host_name': {'readonly': True}, 'locations': {'readonly': True}, }", "code. :vartype code: str :ivar http_status_code: The HTTP status code.", "metrics objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The next link.", "None) self.routing = kwargs.get('routing', None) self.storage_endpoints = kwargs.get('storage_endpoints', None) self.messaging_endpoints", "be ignored when sending a request. :param value: JSON-serialized array", "'type': 'RouteErrorPosition'}, } def __init__( self, **kwargs ): super(RouteErrorRange, self).__init__(**kwargs)", "'CloudToDeviceProperties'}, 'comments': {'key': 'comments', 'type': 'str'}, 'features': {'key': 'features', 'type':", "'details', 'type': 'str'}, } def __init__( self, **kwargs ): super(ErrorDetails,", "to be applied to. For example, DeviceMessages. Possible values include:", "self.service_bus_queues = kwargs.get('service_bus_queues', None) self.service_bus_topics = kwargs.get('service_bus_topics', None) self.event_hubs =", "} _attribute_map = { 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string':", "error happened. :param line: Line where the route error happened.", "self.properties = kwargs.get('properties', None) self.sku = kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The", "is retrying to send data to this endpoint. The status", "True}, 'endpoint': {'readonly': True}, } _attribute_map = { 'retention_time_in_days': {'key':", "'filter_name': {'required': True}, 'action': {'required': True}, 'ip_mask': {'required': True}, }", "be ignored when sending a request. :param value: :type value:", "including the challenge nonce issued for the Proof-Of-Possession flow. Variables", "self.health_status = kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of", "} def __init__( self, **kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate =", "Azure resource. 
Variables are only populated by the server, and", "str \"\"\" _validation = { 'connection_string': {'required': True}, 'container_name': {'required':", "self).__init__(**kwargs) self.failover_region = kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of the", "testing one route. :param result: Result of testing route. Possible", "'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type':", "list of endpoints to which the messages that satisfy the", "self, **kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys =", "'description': {'readonly': True}, } _attribute_map = { 'provider': {'key': 'provider',", "None self.details = None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of the", "self.localized_value = kwargs.get('localized_value', None) class Operation(msrest.serialization.Model): \"\"\"IoT Hub REST API", "None) self.locations = None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties. Variables", "service bus queue endpoint. :type connection_string: str :param name: Required.", "= kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related", "time. :vartype created: ~datetime.datetime :ivar updated: The certificate's last update", "queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period", "send to Azure. :ivar resource_type: The type of the resource.", "\"\"\"The properties of the EventHubConsumerGroupInfo object. Variables are only populated", "'type': 'str'}, 'features': {'key': 'features', 'type': 'str'}, 'locations': {'key': 'locations',", "operation. 
Variables are only populated by the server, and will", "name: str :param subscription_id: The subscription identifier of the storage", "= None class EventHubProperties(msrest.serialization.Model): \"\"\"The properties of the provisioned Event", "self.cloud_to_device = kwargs.get('cloud_to_device', None) self.comments = kwargs.get('comments', None) self.features =", "'currentValue', 'type': 'long'}, 'max_value': {'key': 'maxValue', 'type': 'long'}, } def", "subscription_id: The subscription identifier of the event hub endpoint. :type", "'path': {'key': 'path', 'type': 'str'}, 'endpoint': {'key': 'endpoint', 'type': 'str'},", "True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True},", "self.connection_string = kwargs['connection_string'] self.container_name = kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A container", "specified. :type container_name: str \"\"\" _validation = { 'connection_string': {'required':", "kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model): \"\"\"The properties of an IoT hub.", "only possible keys to this dictionary is events. This key", "happened. :type column: int \"\"\" _attribute_map = { 'line': {'key':", "Required. The connection string of the storage account. :type connection_string:", "include: \"undefined\", \"false\", \"true\". :type result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param", "self.certificate = kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. 
Variables", "{'key': 'resourceType', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, 'capacity':", "{ 'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type':", "recovery (DR) paired region and also the region where the", "= kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of an IoT", ":param container_name: Required. The name of storage container in the", "of the job. Possible values include: \"unknown\", \"export\", \"import\", \"backup\",", "on the routing rules. This list does not include the", "of bytes for each blob written to storage. Value should", "kwargs.get('message', None) self.twin = kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of", "matched. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map = { 'properties': {'key':", "time. :vartype updated: ~datetime.datetime :param certificate: The certificate content. :type", "Event Hub-compatible consumer group identifier. :vartype id: str :ivar name:", "connectionString specified. :type container_name: str \"\"\" _validation = { 'connection_string':", ":type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required. IotHub SKU info. :type", "'type': 'int'}, 'encoding': {'key': 'encoding', 'type': 'str'}, } def __init__(", "job identifier of the parent job, if any. :vartype parent_job_id:", "The entity tag. :vartype etag: str :ivar type: The resource", "'RoutingTwin'}, } def __init__( self, **kwargs ): super(TestRouteInput, self).__init__(**kwargs) self.message", "'partition_count': {'key': 'partitionCount', 'type': 'int'}, 'partition_ids': {'key': 'partitionIds', 'type': '[str]'},", "self.sku = kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubDescription", "routing rules. 
:type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The list of", "= kwargs.get('properties', None) self.sku = kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized", "include: \"None\", \"DeviceManagement\". :type features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations:", "to apply the fallback route. If the condition is not", "list of shared access policies with a next link. Variables", "'locations': {'readonly': True}, } _attribute_map = { 'authorization_policies': {'key': 'authorizationPolicies',", "not be the same as the actual queue name. :type", "'excludeKeys', 'type': 'bool'}, } def __init__( self, **kwargs ): super(ExportDevicesRequest,", "The lock duration for the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type", "): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "'type': 'str'}, 'action': {'key': 'action', 'type': 'str'}, 'ip_mask': {'key': 'ipMask',", "'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, } def", "current_value: Current number of IotHub type. :type current_value: int :param", "hub shared access policy. All required parameters must be populated", "\"Invalid\", \"AlreadyExists\". :vartype reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The", "the condition are routed. Currently only one endpoint is allowed.", "an export of all devices in the IoT hub. All", "IoT hub attempts to deliver a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type", "output blob container URI. :type output_blob_container_uri: str \"\"\" _validation =", "Line where the route error happened. 
:type line: int :param", "unique across endpoint types. :type name: str :param subscription_id: The", "} def __init__( self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string =", "{ 'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id': {'key': 'id', 'type':", "'type': '{str}'}, } def __init__( self, **kwargs ): super(RoutingMessage, self).__init__(**kwargs)", "kwargs.get('event_hub_endpoints', None) self.routing = kwargs.get('routing', None) self.storage_endpoints = kwargs.get('storage_endpoints', None)", "{ 'value': {'key': 'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink', 'type':", "'str'}, 'location': {'key': 'location', 'type': 'RouteErrorRange'}, } def __init__( self,", "} def __init__( self, **kwargs ): super(TestRouteInput, self).__init__(**kwargs) self.message =", "self.next_link = None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables are only", "'value': {'required': True}, 'endpoint_names': {'required': True, 'min_items': 1}, } _attribute_map", "bool :param cloud_to_device: The IoT hub cloud-to-device messaging properties. :type", "**kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id =", ":ivar next_link: The next link. :vartype next_link: str \"\"\" _validation", "{ 'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type':", "will be ignored when sending a request. 
:param authorization_policies: The", "\"\"\" _attribute_map = { 'routes': {'key': 'routes', 'type': '[MatchedRoute]'}, }", ":param subscription_id: The subscription identifier of the service bus topic", "None) class RouteProperties(msrest.serialization.Model): \"\"\"The properties of a routing rule that", "self.batch_frequency_in_seconds = kwargs.get('batch_frequency_in_seconds', None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None) self.encoding =", "actual queue name. :type name: str :param subscription_id: The subscription", "verification code that will be used for proof of possession.", "{'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device':", "partitions for receiving device-to-cloud messages in the Event Hub-compatible endpoint.", "connection string of the service bus topic endpoint. :type connection_string:", "= None self.reason = None self.message = kwargs.get('message', None) class", "'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, **kwargs", ":param max_delivery_count: The max delivery count for cloud-to-device messages in", "**kwargs ): super(MatchedRoute, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model):", "'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'},", "established an eventually consistent state of health. The 'dead' status", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names: Required. The list", "None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The", ":param start: Start where the route error happened. 
:type start:", "'defaultTtlAsIso8601', 'type': 'duration'}, 'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'}, } def", "): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name = None self.current_value = None self.max_value", "file_name_format: str :param batch_frequency_in_seconds: Time interval at which blobs are", "~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The resource identifier. :vartype id: str :ivar", "self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model):", "'str'}, 'exclude_keys': {'key': 'excludeKeys', 'type': 'bool'}, } def __init__( self,", "'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id': {'key': 'id', 'type': 'str'},", "that satisfy the condition are routed. Currently only one endpoint", "flow. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The resource identifier. :vartype", "be populated in order to send to Azure. :param failover_region:", "'tags', 'type': '{str}'}, } def __init__( self, **kwargs ): super(Resource,", "self.message = kwargs.get('message', None) self.severity = kwargs.get('severity', None) self.location =", "_attribute_map = { 'code': {'key': 'code', 'type': 'str'}, 'http_status_code': {'key':", "None self.is_verified = None self.created = None self.updated = None", "= None self.default = None self.scale_type = None class Resource(msrest.serialization.Model):", "making create or update calls for the IoT hub. :type", "{'required': True}, 'tier': {'readonly': True}, } _attribute_map = { 'name':", "{'key': 'state', 'type': 'str'}, 'host_name': {'key': 'hostName', 'type': 'str'}, 'event_hub_endpoints':", "IoT Hub operations supported by the Microsoft.Devices resource provider. :vartype", "can only include alphanumeric characters, periods, underscores, hyphens and has", "key_name: Required. 
The name of the shared access policy. :type", "str :param health_status: Health statuses have following meanings. The 'healthy'", "{'key': 'localizedValue', 'type': 'str'}, } def __init__( self, **kwargs ):", "kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties", "provide parameters when requesting an import of all devices in", ":type limit: int :param name: IotHub type. :type name: ~azure.mgmt.iothub.v2019_11_04.models.Name", "= kwargs.get('message', None) self.severity = kwargs.get('severity', None) self.location = kwargs.get('location',", "hub comments. :type comments: str :param features: The capabilities and", "the route that is used as a fall-back route when", "self, **kwargs ): super(TestRouteInput, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.route", "__init__( self, **kwargs ): super(RouteProperties, self).__init__(**kwargs) self.name = kwargs['name'] self.source", "kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model):", "endpoints. See: https://aka.ms/telemetryoneventgrid. :type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map = {", "the endpoint. :type endpoint_id: str :param health_status: Health statuses have", "be populated in order to send to Azure. :param filter_name:", "The provisioning state. 
:vartype provisioning_state: str :ivar state: The hub", "): super(RouteErrorPosition, self).__init__(**kwargs) self.line = kwargs.get('line', None) self.column = kwargs.get('column',", "} _attribute_map = { 'value': {'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link':", "of time for which a message is available to consume", ":type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition that", "{'key': 'provisioningState', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, 'host_name':", ":param endpoint_names: Required. The list of endpoints for which the", "blob written to storage. Value should be between 10485760(10MB) and", "'str'}, 'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type':", "'end_time_utc': {'key': 'endTimeUtc', 'type': 'rfc-1123'}, 'type': {'key': 'type', 'type': 'str'},", ":type role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map = { 'location':", "kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired: Twin desired properties.", "list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The next link. :vartype next_link: str \"\"\"", "by default. For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param", "IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubSkuDescription objects with a next", "super(TestRouteInput, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.route = kwargs['route'] self.twin", "~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin:", "The routing related properties of the IoT hub. 
See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging.", "whether the provided name is available. :vartype name_available: bool :ivar", "'bool'}, } def __init__( self, **kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri", "code: The error code. :vartype code: str :ivar http_status_code: The", ":param value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype next_link: str", "properties of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties", "self.ip_mask = kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The properties of the Job", "The connection string of the event hub endpoint. :type connection_string:", "quota response. Variables are only populated by the server, and", "queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. :type max_delivery_count: int :param default_ttl_as_iso8601: The", "'type': '[IotHubLocationDescription]'}, } def __init__( self, **kwargs ): super(IotHubProperties, self).__init__(**kwargs)", "all routes. :param routing_source: Routing source. Possible values include: \"Invalid\",", "types for free hubs. :param service_bus_queues: The list of Service", "'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key':", "'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "messages to built-in and custom endpoints. A maximum of 100", ":vartype type: str :param location: Required. The resource location. :type", "values include: \"undefined\", \"false\", \"true\". 
:type result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus", "self.retention_time_in_days = kwargs.get('retention_time_in_days', None) self.partition_count = kwargs.get('partition_count', None) self.partition_ids =", "kwargs.get('properties', None) self.id = None self.name = None self.type =", "'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing': {'key': 'routing', 'type': 'RoutingProperties'},", "\"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition:", "self.value = kwargs.get('value', None) self.next_link = None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information", "'type': 'IotHubProperties'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'}, } def __init__(", "{'key': 'properties', 'type': 'CertificateProperties'}, 'id': {'key': 'id', 'type': 'str'}, 'name':", "None) self.properties = kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired:", "**kwargs ): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id', None) self.health_status =", "the routing rules. A maximum of 10 custom endpoints are", "None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties. 
Variables are only populated", "kwargs.get('retention_time_in_days', None) self.partition_count = kwargs.get('partition_count', None) self.partition_ids = None self.path", "region is the Azure disaster recovery (DR) paired region and", "self.routing_source = kwargs.get('routing_source', None) self.message = kwargs.get('message', None) self.twin =", "'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'},", "in the 'routes' section get routed to the built-in eventhub", "} _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display':", "= None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub cloud-to-device messaging properties.", "class OperationListResult(msrest.serialization.Model): \"\"\"Result of the request to list IoT Hub", "is provided in the response body, it must also be", "self.max_value = None class IotHubQuotaMetricInfoListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubQuotaMetricInfo", "URI. :type export_blob_container_uri: str :param exclude_keys: Required. The value indicating", "value: IotHub type. :type value: str :param localized_value: Localized value", "http_status_code: str :ivar message: The error message. :vartype message: str", "provisioned. The secondary region is the Azure disaster recovery (DR)", "when requesting manual Failover for a hub. All required parameters", "supported by the Microsoft.Devices resource provider. :vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar", "Failover for a hub. All required parameters must be populated", "description of the IoT hub. Variables are only populated by", "certificate's expiration date and time. :vartype expiry: ~datetime.datetime :ivar thumbprint:", "Value should be between 60 and 720 seconds. Default value", "matched. :param properties: Properties of routes that matched. 
:type properties:", "'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type': 'str'}, } def __init__( self, **kwargs", "value is 314572800(300MB). :type max_chunk_size_in_bytes: int :param encoding: Encoding that", ":param value: The array of Certificate objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription]", "'type': 'str'}, } def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs)", "endpoint types. All required parameters must be populated in order", "'properties': {'key': 'properties', 'type': 'IotHubProperties'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'},", "connection string for the Azure Storage account to which files", "TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of testing a route. :param compilation_errors: JSON-serialized", "for free hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The list", "= { 'value': {'key': 'value', 'type': '[EndpointHealthData]'}, 'next_link': {'key': 'nextLink',", "If True, file upload notifications are enabled. :type enable_file_upload_notifications: bool", ":ivar operation: Name of the operation. :vartype operation: str :ivar", "{'required': True}, } _attribute_map = { 'resource_type': {'key': 'resourceType', 'type':", "enabled devices in the identity registry. :vartype enabled_device_count: long :ivar", "kwargs.get('service_bus_queues', None) self.service_bus_topics = kwargs.get('service_bus_topics', None) self.event_hubs = kwargs.get('event_hubs', None)", "endpoint is not accepting messages as expected and IoT Hub", "'[str]'}, 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, } def __init__( self,", "= None self.updated = None self.certificate = kwargs.get('certificate', None) class", "= kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result of the request to list", "length of 64 characters. 
The following names are reserved: events,", "The properties of the route that is used as a", "The value indicating whether keys should be excluded during export.", "to. :type failover_region: str \"\"\" _validation = { 'failover_region': {'required':", "} def __init__( self, **kwargs ): super(OperationListResult, self).__init__(**kwargs) self.value =", ":param value: The array of quota metrics objects. :type value:", "= kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of shared access policies", "CA Certificate. Variables are only populated by the server, and", "when sending a request. :param retention_time_in_days: The retention time for", "\"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". :vartype type:", "\"\"\" _validation = { 'name': {'required': True}, 'tier': {'readonly': True},", "__init__( self, **kwargs ): super(TestRouteResult, self).__init__(**kwargs) self.result = kwargs.get('result', None)", "applied to, such as DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\",", "'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id',", "route errors. :param start: Start where the route error happened.", "_attribute_map = { 'properties': {'key': 'properties', 'type': 'RouteProperties'}, } def", "self, **kwargs ): super(IotHubCapacity, self).__init__(**kwargs) self.minimum = None self.maximum =", "{ 'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type':", "kwargs['connection_string'] self.container_name = kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A container holding only", "valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. 
:type sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string: Required.", "The period of time for which a message is available", "'isEnabled', 'type': 'bool'}, } def __init__( self, **kwargs ): super(FallbackRouteProperties,", "the operation. :vartype description: str \"\"\" _validation = { 'provider':", "'type': 'str'}, } def __init__( self, **kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs)", "this endpoint. The status of an unhealthy endpoint will be", "the messages to, based on the routing rules. :type service_bus_queues:", "be applied to. For example, DeviceMessages. Possible values include: \"Invalid\",", "IoT Hub has not established a connection with the endpoint.", ":param certificate: base-64 representation of the X509 leaf certificate .cer", "operation list results if there are any. :vartype next_link: str", "to the custom endpoints to which your IoT hub routes", "self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id', None) self.health_status = kwargs.get('health_status', None) class", "IoT Hub uses these properties when it routes messages to", "'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, 'is_enabled': {'key': 'isEnabled',", "\"F1\", \"S1\", \"S2\", \"S3\", \"B1\", \"B2\", \"B3\". :type name: str", "write | action | delete}. :vartype name: str :param display:", "Link to more results. :vartype next_link: str \"\"\" _validation =", "export blob container URI. :type export_blob_container_uri: str :param exclude_keys: Required.", "{'key': 'connectionString', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'subscription_id':", "} def __init__( self, **kwargs ): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count =", "{'key': 'httpStatusCode', 'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'details':", "of the service bus topic endpoint. :type subscription_id: str :param", "\"DeviceJobLifecycleEvents\". 
:type routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing message.", "'str'}, } def __init__( self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string", "request. :ivar provider: Service provider: Microsoft Devices. :vartype provider: str", "has not established a connection with the endpoint. No messages", "kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related to an event", "'str'}, 'rights': {'key': 'rights', 'type': 'str'}, } def __init__( self,", "properties: dict[str, str] :ivar id: The Event Hub-compatible consumer group", "DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect,", "def __init__( self, **kwargs ): super(IotHubLocationDescription, self).__init__(**kwargs) self.location = kwargs.get('location',", "value for this property when the enableFileUploadNotifications property is set", "'rfc-1123'}, 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, 'is_verified': {'key': 'isVerified', 'type':", "= { 'location': {'key': 'location', 'type': 'str'}, 'role': {'key': 'role',", "'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints', 'type':", "'properties', 'type': 'CertificateProperties'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key':", "to specify whether a route is enabled. :type is_enabled: bool", "True}, 'container_name': {'required': True}, } _attribute_map = { 'sas_ttl_as_iso8601': {'key':", "be applied to, such as DeviceMessages. 
Possible values include: \"Invalid\",", "**kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource = None", "'type': 'str'}, } def __init__( self, **kwargs ): super(IpFilterRule, self).__init__(**kwargs)", "provider: Microsoft Devices. :vartype provider: str :ivar resource: Resource Type:", "'comments': {'key': 'comments', 'type': 'str'}, 'features': {'key': 'features', 'type': 'str'},", "super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "apply the fallback route. If the condition is not provided", "True}, 'status_message': {'readonly': True}, 'parent_job_id': {'readonly': True}, } _attribute_map =", "of the IoT hub to check. :type name: str \"\"\"", "when sending a request. :ivar total_device_count: The total count of", "IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count:", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link to more results. :vartype", "None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub cloud-to-device messaging properties. :param", "self.etag = None self.type = None class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT", "\"\"\" _validation = { 'export_blob_container_uri': {'required': True}, 'exclude_keys': {'required': True},", "RoutingProperties(msrest.serialization.Model): \"\"\"The routing related properties of the IoT hub. See:", "all endpoint types for free hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param", "'containerName', 'type': 'str'}, } def __init__( self, **kwargs ): super(StorageEndpointProperties,", "= kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. 
Variables are", "'routes' section get routed to the built-in eventhub endpoint. :type", "1}, } _attribute_map = { 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type': 'duration'},", "True}, 'message': {'readonly': True}, 'details': {'readonly': True}, } _attribute_map =", ":vartype name: str :ivar etag: The entity tag. :vartype etag:", "sku: Required. IotHub SKU info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation", "'updated', 'type': 'rfc-1123'}, 'certificate': {'key': 'certificate', 'type': 'str'}, } def", "properties of the provisioned Event Hub-compatible endpoint used by the", "\"\"\"The JSON-serialized leaf certificate. :param certificate: base-64 representation of X509", "-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "last update date and time. :vartype updated: ~datetime.datetime :param certificate:", "Required. The source to which the routing rule is to", "'value': {'key': 'value', 'type': '[CertificateDescription]'}, } def __init__( self, **kwargs", "of endpoints to which the messages that satisfy the condition", "incorrect behavior and will be lost if the code is", "= None self.failure_reason = None self.status_message = None self.parent_job_id =", "display: The object that represents the operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay", ":param subscription_id: The subscription identifier of the storage account. :type", "} _attribute_map = { 'value': {'key': 'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link':", "\"\"\"Position where the route error happened. :param line: Line where", "to Azure. :param name: Required. The name of the IoT", "comments: IoT hub comments. 
:type comments: str :param features: The", "kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class RoutingMessage(msrest.serialization.Model): \"\"\"Routing message.", "{'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, 'container_name':", ":type resource_group: str \"\"\" _validation = { 'connection_string': {'required': True},", "\"AlreadyExists\". :vartype reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The detailed", "'value', 'type': '[IotHubQuotaMetricInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "IotHub type. :type name: ~azure.mgmt.iothub.v2019_11_04.models.Name \"\"\" _attribute_map = { 'id':", "{'readonly': True, 'maximum': 1, 'minimum': 1}, 'maximum': {'readonly': True}, 'default':", "testing all routes. :param routes: JSON-serialized array of matched routes.", "or update calls for the IoT hub. :type event_hub_endpoints: dict[str,", "{'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, }", "= None self.next_link = None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics.", "{ 'routes': {'key': 'routes', 'type': '[MatchedRoute]'}, } def __init__( self,", "capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation = { 'resource_type': {'readonly': True}, 'sku':", "hub cloud-to-device messaging properties. :type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT", "~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The detailed reason message. :type message: str", "super(FallbackRouteProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None) self.source = kwargs['source'] self.condition", "role: The role of the region, can be either primary", "to send to Azure. :param failover_region: Required. 
Region the hub", "body: Body of routing message. :type body: str :param app_properties:", "'rights': {'required': True}, } _attribute_map = { 'key_name': {'key': 'keyName',", "str :param container_name: Required. The name of the root container", "array of Event Hub-compatible consumer group names with a next", "\"\"\"The object that represents the operation. Variables are only populated", "values include: \"Accept\", \"Reject\". :type action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param", "Hub operations supported by the Microsoft.Devices resource provider. :vartype value:", "key. :type secondary_key: str :param rights: Required. The permissions assigned", "the job. Possible values include: \"unknown\", \"enqueued\", \"running\", \"completed\", \"failed\",", "default number of units. :vartype default: long :ivar scale_type: The", "The description of an X509 CA Certificate including the challenge", "'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "shared access policy. :type key_name: str :param primary_key: The primary", "_attribute_map = { 'value': {'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key':", "tags. :type properties: dict[str, str] :ivar id: The Event Hub-compatible", "from this endpoint. Possible values include: \"unknown\", \"healthy\", \"unhealthy\", \"dead\".", "endpoints that IoT hub routes messages to, based on the", "will evaluate to true by default. For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language.", "'encoding': {'key': 'encoding', 'type': 'str'}, } def __init__( self, **kwargs", "None self.next_link = None class RegistryStatistics(msrest.serialization.Model): \"\"\"Identity registry statistics. Variables", "kwargs.get('severity', None) self.location = kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where", "storage account. 
:type subscription_id: str :param resource_group: The name of", "messages that satisfy the condition are routed. Currently only one", "self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None) self.connection_string = kwargs['connection_string'] self.container_name = kwargs['container_name']", "bus topic endpoint. :type resource_group: str \"\"\" _validation = {", "disabled_device_count: long \"\"\" _validation = { 'total_device_count': {'readonly': True}, 'enabled_device_count':", "'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( self,", "retried sending messages for the retrial period. See IoT Hub", "\"dead\". :type health_status: str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map = {", "def __init__( self, **kwargs ): super(FailoverInput, self).__init__(**kwargs) self.failover_region = kwargs['failover_region']", ":param value: The array of JobResponse objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse]", "SKU info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation = { 'id':", "self).__init__(**kwargs) self.name = kwargs['name'] self.tier = None self.capacity = kwargs.get('capacity',", "{'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True}, 'batch_frequency_in_seconds': {'maximum': 720,", "self.endpoint_id = kwargs.get('endpoint_id', None) self.health_status = kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model):", "{'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags':", "free hubs. 
:param service_bus_queues: The list of Service Bus queue", "{'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly':", "kwargs.get('result', None) self.details = kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result", "str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition which is evaluated", "rules. :type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The list of Service", "'capacity', 'type': 'long'}, } def __init__( self, **kwargs ): super(IotHubSkuInfo,", "def __init__( self, **kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id = kwargs.get('id',", "'routes', 'type': '[MatchedRoute]'}, } def __init__( self, **kwargs ): super(TestAllRoutesResult,", "message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing Twin", "value of name. :type localized_value: str \"\"\" _attribute_map = {", "def __init__( self, **kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string']", "to Azure. :param name: Required. The name of the route.", "'created': {'readonly': True}, 'updated': {'readonly': True}, } _attribute_map = {", "ignored when sending a request. :param value: The array of", "related properties of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing:", "of tags. The resource tags. :type tags: dict[str, str] \"\"\"", "str :param container_name: Required. The name of storage container in", "'details': {'key': 'details', 'type': 'str'}, } def __init__( self, **kwargs", "based on the routing rules. This list does not include", "The lock duration. See: https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. 
:type lock_duration_as_iso8601: ~datetime.timedelta :param", "): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601',", "Name(msrest.serialization.Model): \"\"\"Name of Iot Hub type. :param value: IotHub type.", "'type': 'RoutingMessage'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def __init__(", "{'readonly': True}, 'path': {'readonly': True}, 'endpoint': {'readonly': True}, } _attribute_map", "unavailability. Possible values include: \"Invalid\", \"AlreadyExists\". :vartype reason: str or", "self.value = kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model): \"\"\"The description of an", "Hub capacity information. Variables are only populated by the server,", "values include: \"Avro\", \"AvroDeflate\", \"JSON\". :type encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding", ":param value: IotHub type. :type value: str :param localized_value: Localized", "= kwargs.get('sas_ttl_as_iso8601', None) self.connection_string = kwargs['connection_string'] self.container_name = kwargs['container_name'] class", "operations and a URL link to get the next set", "an endpoint. :param endpoint_id: Id of the endpoint. 
:type endpoint_id:", "def __init__( self, **kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri']", "'type': '[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments',", "'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount',", "Storage account and that MUST have its key as $default.", ":type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map = { 'message': {'key': 'message',", "of route compilation errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map =", "= { 'provisioning_state': {'readonly': True}, 'state': {'readonly': True}, 'host_name': {'readonly':", "request. :param value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype next_link:", "service bus topic endpoint types. All required parameters must be", "kwargs.get('sas_ttl_as_iso8601', None) self.connection_string = kwargs['connection_string'] self.container_name = kwargs['container_name'] class TagsResource(msrest.serialization.Model):", "RegistryWrite, ServiceConnect, DeviceConnect\". :type rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation", "(DR) paired region and also the region where the IoT", "messaging properties. :type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments: IoT hub comments.", "name of storage container in the storage account. :type container_name:", "object \"\"\" _attribute_map = { 'desired': {'key': 'desired', 'type': 'object'},", "this string containing the reason for the failure. 
:vartype failure_reason:", "} def __init__( self, **kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject =", "): super(ErrorDetails, self).__init__(**kwargs) self.code = None self.http_status_code = None self.message", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The next link. :vartype next_link:", "next set of results. Variables are only populated by the", "state: The hub state. :vartype state: str :ivar host_name: The", "'type': 'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount',", "allowed across all endpoint types for free hubs. :param service_bus_queues:", "minimum: The minimum number of units. :vartype minimum: long :ivar", "self, **kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id", "'maximum': 1, 'minimum': 1}, 'maximum': {'readonly': True}, 'default': {'readonly': True},", "} def __init__( self, **kwargs ): super(JobResponse, self).__init__(**kwargs) self.job_id =", "= None self.created = None self.updated = None self.verification_code =", "'int'}, } def __init__( self, **kwargs ): super(RouteErrorPosition, self).__init__(**kwargs) self.line", "queue name. :type name: str :param subscription_id: The subscription identifier", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count: int :ivar partition_ids: The partition", "class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of Event Hub-compatible consumer group", "{'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type':", "# Licensed under the MIT License. See License.txt in the", "kwargs.get('value', None) self.next_link = None class EventHubProperties(msrest.serialization.Model): \"\"\"The properties of", "properties of an IoT hub. 
Variables are only populated by", ":vartype provider: str :ivar resource: Resource Type: IotHubs. :vartype resource:", "'features': {'key': 'features', 'type': 'str'}, 'locations': {'key': 'locations', 'type': '[IotHubLocationDescription]'},", ":type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation = { 'max_delivery_count': {'maximum': 100,", "output_blob_container_uri: str \"\"\" _validation = { 'input_blob_container_uri': {'required': True}, 'output_blob_container_uri':", "kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device = kwargs.get('cloud_to_device', None) self.comments = kwargs.get('comments', None)", ":type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition :param end: End where the route error", "= kwargs['key_name'] self.primary_key = kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key', None)", ":type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map = { 'tags': {'key': 'tags',", "'name': {'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required':", "{'key': 'storageEndpoints', 'type': '{StorageEndpointProperties}'}, 'messaging_endpoints': {'key': 'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications':", "= kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device = kwargs.get('cloud_to_device', None) self.comments = kwargs.get('comments',", "CA Certificate including the challenge nonce issued for the Proof-Of-Possession", "'str'}, 'state': {'key': 'state', 'type': 'str'}, 'host_name': {'key': 'hostName', 'type':", "str \"\"\" _validation = { 'next_link': {'readonly': True}, } _attribute_map", "str \"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly':", "\"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". 
:vartype type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status:", "None self.maximum = None self.default = None self.scale_type = None", "send to Azure. :ivar id: The resource identifier. :vartype id:", "Azure. :param input_blob_container_uri: Required. The input blob container URI. :type", "'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type':", "display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation = { 'name': {'readonly': True}, }", "hubs. :param service_bus_queues: The list of Service Bus queue endpoints", "to provide parameters when requesting an export of all devices", "None) self.next_link = None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables are", ":vartype next_link: str \"\"\" _validation = { 'value': {'readonly': True},", "RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route errors. :param start: Start where the", "$default. Specifying more than one storage account causes an error", "resource identifier. :vartype id: str :ivar name: The name of", "{'readonly': True}, 'name': {'readonly': True}, 'etag': {'readonly': True}, 'type': {'readonly':", "None) class IotHubProperties(msrest.serialization.Model): \"\"\"The properties of an IoT hub. Variables", "while making create or update calls for the IoT hub.", "same as the actual queue name. :type name: str :param", "self, **kwargs ): super(JobResponse, self).__init__(**kwargs) self.job_id = None self.start_time_utc =", "feedback queue for cloud-to-device messages. :param lock_duration_as_iso8601: The lock duration", "to provide parameters when requesting an import of all devices", "self.properties = kwargs.get('properties', None) class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the", "The name of the SKU. Possible values include: \"F1\", \"S1\",", "Currently only one endpoint is allowed. 
:type endpoint_names: list[str] :param", "'location': {'required': True}, 'sku': {'required': True}, } _attribute_map = {", "**kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs) self.name = None self.current_value = None", "# Code generated by Microsoft (R) AutoRest Code Generator. #", "'type': 'str'}, } def __init__( self, **kwargs ): super(Name, self).__init__(**kwargs)", ":param export_blob_container_uri: Required. The export blob container URI. :type export_blob_container_uri:", "container endpoints that IoT hub routes messages to, based on", "\"warning\". :type severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location where", ":ivar path: The Event Hub-compatible name. :vartype path: str :ivar", "\"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\",", "'primaryKey', 'type': 'str'}, 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, 'rights': {'key':", "'str'}, 'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__( self,", "have following meanings. The 'healthy' status shows that the endpoint", "the route error happened. :type line: int :param column: Column", "~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The resource identifier. :vartype id: str :ivar", "properties of an Azure resource. Variables are only populated by", "not accepting messages, after IoT Hub retried sending messages for", "{'readonly': True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly': True}, } _attribute_map", "consume before it is expired by the IoT hub. 
See:", "endpoints are allowed across all endpoint types for paid hubs", "} def __init__( self, **kwargs ): super(IotHubCapacity, self).__init__(**kwargs) self.minimum =", "self.features = kwargs.get('features', None) self.locations = None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota", "class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of an X509 CA Certificate including", "super(ErrorDetails, self).__init__(**kwargs) self.code = None self.http_status_code = None self.message =", "True}, 'state': {'readonly': True}, 'host_name': {'readonly': True}, 'locations': {'readonly': True},", "properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map = { 'tags': {'key':", "upload is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601: ~datetime.timedelta :param", "int :param column: Column where the route error happened. :type", "'type': 'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, } def __init__(", "= kwargs['ip_mask'] class JobResponse(msrest.serialization.Model): \"\"\"The properties of the Job Response", "self).__init__(**kwargs) self.job_id = None self.start_time_utc = None self.end_time_utc = None", "**kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "thrown. :type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The messaging endpoint", "operations. It contains a list of operations and a URL", "error happened. :type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\" _attribute_map = { 'start':", "self, **kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes = kwargs.get('routes', None) class", "account. 
:type connection_string: str :param name: Required. The name that", "thrown. Not specifying a value for this property when the", "= kwargs.get('endpoints', None) self.routes = kwargs.get('routes', None) self.fallback_route = kwargs.get('fallback_route',", "column: int \"\"\" _attribute_map = { 'line': {'key': 'line', 'type':", "'systemProperties', 'type': '{str}'}, } def __init__( self, **kwargs ): super(RoutingMessage,", "The resource identifier. :vartype id: str :ivar name: The resource", "super(IotHubSkuDescription, self).__init__(**kwargs) self.resource_type = None self.sku = kwargs['sku'] self.capacity =", "account causes an error to be thrown. Not specifying a", "'disabledDeviceCount', 'type': 'long'}, } def __init__( self, **kwargs ): super(RegistryStatistics,", "~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map = { 'tags': {'key': 'tags', 'type': 'object'},", "service bus queue endpoint. :type subscription_id: str :param resource_group: The", "self.certificate = kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables", "\"false\", \"true\". :type result: str or ~azure.mgmt.iothub.v2019_11_04.models.TestResultStatus :param details: Detailed", "of the conditions specified in the 'routes' section are met.", "__init__( self, **kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource", "None self.resource = None self.operation = None self.description = None", "IotHubDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubDescription objects with a next", "None) self.next_link = None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched. :param", "_attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id': {'key':", "of IotHubDescription objects with a next link. 
Variables are only", "{'key': 'keyName', 'type': 'str'}, 'primary_key': {'key': 'primaryKey', 'type': 'str'}, 'secondary_key':", "__init__( self, **kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id = kwargs.get('id', None)", "event hub endpoint. All required parameters must be populated in", "'etag', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'IotHubProperties'}, 'sku': {'key':", "\"\"\" _validation = { 'provisioning_state': {'readonly': True}, 'state': {'readonly': True},", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", "name of the resource group of the service bus topic", "of provisioned IoT Hub units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity: long", "class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of testing a route. :param compilation_errors:", "and will be ignored when sending a request. :ivar subject:", "dict[str, ~azure.mgmt.iothub.v2019_11_04.models.EventHubProperties] :param routing: The routing related properties of the", "to provide failover region when requesting manual Failover for a", "properties. :param max_delivery_count: The max delivery count for cloud-to-device messages", "related properties of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :param endpoints:", "'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key':", "'str'}, } def __init__( self, **kwargs ): super(CertificateDescription, self).__init__(**kwargs) self.properties", "'http_status_code': {'readonly': True}, 'message': {'readonly': True}, 'details': {'readonly': True}, }", "The input blob container URI. 
:type input_blob_container_uri: str :param output_blob_container_uri:", "class EventHubProperties(msrest.serialization.Model): \"\"\"The properties of the provisioned Event Hub-compatible endpoint", "IoT Hub retried sending messages for the retrial period. See", "hyphens and has a maximum length of 64 characters. The", "enabled_device_count: The count of enabled devices in the identity registry.", "IoT hub shared access policy. All required parameters must be", "} _attribute_map = { 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, 'reason':", "registry. :vartype disabled_device_count: long \"\"\" _validation = { 'total_device_count': {'readonly':", "str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map = { 'location': {'key': 'location',", "{'required': True}, 'action': {'required': True}, 'ip_mask': {'required': True}, } _attribute_map", "request. :ivar job_id: The job identifier. :vartype job_id: str :ivar", "endpoints that IoT hub routes the messages to, based on", "def __init__( self, **kwargs ): super(EndpointHealthData, self).__init__(**kwargs) self.endpoint_id = kwargs.get('endpoint_id',", "'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'endpoint_names': {'key': 'endpointNames',", "next_link: The next link. :vartype next_link: str \"\"\" _validation =", "the fallback route. If the condition is not provided it", "\"\"\" _validation = { 'name_available': {'readonly': True}, 'reason': {'readonly': True},", "Possible values include: \"Invalid\", \"AlreadyExists\". :vartype reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason", "of the Azure region. :type location: str :param role: The", "class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of EndpointHealthData objects with a", "it is expired by the IoT hub. 
See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload.", "self).__init__(**kwargs) self.name = None self.current_value = None self.max_value = None", "'type': 'str'}, } def __init__( self, **kwargs ): super(StorageEndpointProperties, self).__init__(**kwargs)", "\"\"\"The JSON-serialized array of EndpointHealthData objects with a next link.", "= { 'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri',", "def __init__( self, **kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs) self.name = kwargs['name']", "is *not* required. If it is provided in the response", "{'key': 'healthStatus', 'type': 'str'}, } def __init__( self, **kwargs ):", "The name of the root container where you upload files.", "self.resource_group = kwargs.get('resource_group', None) self.container_name = kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format',", "desired: object :param reported: Twin desired properties. :type reported: object", ":ivar is_verified: Determines whether certificate has been verified. :vartype is_verified:", "**kwargs ): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys']", "dict[str, str] :param etag: The Etag field is *not* required.", "'[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, } def __init__( self,", ":param connection_string: Required. The connection string of the event hub", "The properties of the feedback queue for cloud-to-device messages. :type", "to Azure. :param export_blob_container_uri: Required. 
The export blob container URI.", "a maximum length of 64 characters, and must be unique.", ":type batch_frequency_in_seconds: int :param max_chunk_size_in_bytes: Maximum number of bytes for", "IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties. Variables are only populated by the", "{'required': True}, 'endpoint_names': {'required': True, 'max_items': 1, 'min_items': 1}, 'is_enabled':", "'type': {'readonly': True}, } _attribute_map = { 'properties': {'key': 'properties',", "array of IotHubDescription objects with a next link. Variables are", "to storage. Value should be between 10485760(10MB) and 524288000(500MB). Default", "a header per the normal ETag convention. :type etag: str", "IoT Hub units. See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits. :type capacity: long \"\"\" _validation", "This is an optional parameter. When this property is not", "} def __init__( self, **kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id =", "\"secondary\". :type role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType \"\"\" _attribute_map = {", ":vartype host_name: str :param event_hub_endpoints: The Event Hub-compatible endpoint properties.", "section get routed to the built-in eventhub endpoint. :type fallback_route:", "on the routing rules. :type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map =", "the SKU of the IoT hub. Variables are only populated", "~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition which is evaluated in order", "endpoints. A maximum of 100 routing rules are allowed for", "'route': {'key': 'route', 'type': 'RouteProperties'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'},", "ignored when sending a request. :ivar name: The name of", "\"error\", \"warning\". 
:type severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location", "class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of shared access policies with a", "which the enrichment is applied to the message. :type endpoint_names:", "state. :vartype state: str :ivar host_name: The name of the", ":type message: str :param severity: Severity of the route error.", ":param key: Required. The key or name for the enrichment", "kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing route. All required", "created: ~datetime.datetime :ivar updated: The certificate's last update date and", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'},", ":type storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map = { 'service_bus_queues': {'key': 'serviceBusQueues',", "delivered to or rejected from this endpoint. Possible values include:", "None) class OperationDisplay(msrest.serialization.Model): \"\"\"The object that represents the operation. Variables", "} _attribute_map = { 'value': {'key': 'value', 'type': '[JobResponse]'}, 'next_link':", "'str'}, 'locations': {'key': 'locations', 'type': '[IotHubLocationDescription]'}, } def __init__( self,", "enabled. Possible values include: \"Automatic\", \"Manual\", \"None\". :vartype scale_type: str", "of shared access policies with a next link. Variables are", "header per the normal ETag convention. :type etag: str :param", "= { 'resource_type': {'readonly': True}, 'sku': {'required': True}, 'capacity': {'required':", "'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, } def __init__(", "identifier of the parent job, if any. :vartype parent_job_id: str", "of the event hub endpoint. :type connection_string: str :param name:", "properties: IotHub properties. 
:type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required. IotHub", "{'key': 'default', 'type': 'long'}, 'scale_type': {'key': 'scaleType', 'type': 'str'}, }", "None) self.resource_group = kwargs.get('resource_group', None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related", "__init__( self, **kwargs ): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days', None)", "self, **kwargs ): super(OperationInputs, self).__init__(**kwargs) self.name = kwargs['name'] class OperationListResult(msrest.serialization.Model):", "_validation = { 'name': {'required': True}, 'tier': {'readonly': True}, }", "'parent_job_id': {'readonly': True}, } _attribute_map = { 'job_id': {'key': 'jobId',", "'maxDeliveryCount', 'type': 'int'}, } def __init__( self, **kwargs ): super(MessagingEndpointProperties,", "disabled devices in the identity registry. :vartype disabled_device_count: long \"\"\"", "type. :type unit: str :param current_value: Current number of IotHub", "= { 'filter_name': {'key': 'filterName', 'type': 'str'}, 'action': {'key': 'action',", ":ivar minimum: The minimum number of units. :vartype minimum: long", "messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If True, file upload notifications", "'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key':", "'type': '{EventHubProperties}'}, 'routing': {'key': 'routing', 'type': 'RoutingProperties'}, 'storage_endpoints': {'key': 'storageEndpoints',", "): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count = None self.enabled_device_count = None self.disabled_device_count", "for receiving device-to-cloud messages in the Event Hub-compatible endpoint. 
See:", "'connection_string': {'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, } _attribute_map", "None) self.enrichments = kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties related", "'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'},", ":param message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required.", "True}, 'batch_frequency_in_seconds': {'maximum': 720, 'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum':", "section are met. This is an optional parameter. When this", "**kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject = None self.expiry = None", "tags. The resource tags. :type tags: dict[str, str] :param etag:", "'[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments', 'type':", "{'readonly': True}, 'resource': {'readonly': True}, 'operation': {'readonly': True}, 'description': {'readonly':", "limit: Numerical limit on IotHub type. :type limit: int :param", "routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The properties of the route that", "IP filter rule. :type filter_name: str :param action: Required. The", ":param capacity: The number of provisioned IoT Hub units. See:", "str :ivar message: The error message. :vartype message: str :ivar", "IoT hub. Variables are only populated by the server, and", "{'key': 'properties', 'type': 'RouteProperties'}, } def __init__( self, **kwargs ):", "in order to send to Azure. :param message: Routing message.", "\"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". 
:type routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing", "related to the custom endpoints to which your IoT hub", "class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus topic endpoint", "= kwargs.get('routes', None) self.fallback_route = kwargs.get('fallback_route', None) self.enrichments = kwargs.get('enrichments',", "Event Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count: int :ivar", "partition_count: The number of partitions for receiving device-to-cloud messages in", "IoT hub comments. :type comments: str :param features: The capabilities", "DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\". :type rights:", "'RoutingMessage'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def __init__( self,", "class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of an IoT hub shared access", "include alphanumeric characters, periods, underscores, hyphens, has a maximum length", "= kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class Name(msrest.serialization.Model): \"\"\"Name", "None) self.secondary_key = kwargs.get('secondary_key', None) self.rights = kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model):", "**kwargs ): super(TagsResource, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) class TestAllRoutesInput(msrest.serialization.Model):", "{'key': 'name', 'type': 'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, }", "line: Line where the route error happened. :type line: int", "Azure. :param name: Required. The name of the route. 
The", "include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type routing_source: str or", "messages as expected and IoT Hub is retrying to send", "self.primary_key = kwargs.get('primary_key', None) self.secondary_key = kwargs.get('secondary_key', None) self.rights =", "**kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id =", "feedback queue for cloud-to-device messages. :type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation", "'reported': {'key': 'reported', 'type': 'object'}, } def __init__( self, **kwargs", "be reordered. :type file_name_format: str :param batch_frequency_in_seconds: Time interval at", "class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the SKU of the IoT hub.", "provider: Service provider: Microsoft Devices. :vartype provider: str :ivar resource:", "msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509 Certificate. :param certificate: base-64", "None self.verification_code = None self.certificate = None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The", "{'key': 'capacity', 'type': 'long'}, } def __init__( self, **kwargs ):", "_validation = { 'value': {'readonly': True}, 'next_link': {'readonly': True}, }", "routing rule is to be applied to, such as DeviceMessages.", "export_blob_container_uri: str :param exclude_keys: Required. The value indicating whether keys", "resource_group: The name of the resource group of the storage", "rights reserved. # Licensed under the MIT License. See License.txt", "): super(JobResponse, self).__init__(**kwargs) self.job_id = None self.start_time_utc = None self.end_time_utc", "order to send to Azure. :param filter_name: Required. 
The name", "self).__init__(**kwargs) self.name = None self.display = kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model):", "IoT hub cloud-to-device messaging properties. :type cloud_to_device: ~azure.mgmt.iothub.v2019_11_04.models.CloudToDeviceProperties :param comments:", "the Azure Storage account to which files are uploaded. :type", "'type': 'TestRouteResultDetails'}, } def __init__( self, **kwargs ): super(TestRouteResult, self).__init__(**kwargs)", "ignored when sending a request. :param value: The list of", "self).__init__(**kwargs) self.message = kwargs.get('message', None) self.severity = kwargs.get('severity', None) self.location", "configure only one Azure Storage account and that MUST have", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class IotHubSkuInfo(msrest.serialization.Model):", "{'key': 'limit', 'type': 'int'}, 'name': {'key': 'name', 'type': 'Name'}, }", "shared access policy. All required parameters must be populated in", "in order to send to Azure. :param key: Required. The", "def __init__( self, **kwargs ): super(EventHubProperties, self).__init__(**kwargs) self.retention_time_in_days = kwargs.get('retention_time_in_days',", "as expected and IoT Hub is retrying to send data", "The subscription identifier of the service bus queue endpoint. :type", "name_available: The value which indicates whether the provided name is", "array of IotHubSkuDescription objects with a next link. Variables are", "subscription quota response. :param id: IotHub type id. :type id:", "True}, 'http_status_code': {'readonly': True}, 'message': {'readonly': True}, 'details': {'readonly': True},", "kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) class", "True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True}, 'batch_frequency_in_seconds':", "a request. 
:param value: The array of JobResponse objects. :type", "str :ivar name: The name of the certificate. :vartype name:", "self.provider = None self.resource = None self.operation = None self.description", "grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names: Required. The", "testing a route. :param compilation_errors: JSON-serialized list of route compilation", "'str'}, 'details': {'key': 'details', 'type': 'str'}, } def __init__( self,", "to apply the routing rule. If no condition is provided,", ":param twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map", "'created': {'readonly': True}, 'updated': {'readonly': True}, 'verification_code': {'readonly': True}, 'certificate':", "The connection string for the Azure Storage account to which", ":param column: Column where the route error happened. :type column:", ":type value: str :param localized_value: Localized value of name. :type", "Storage endpoints where you can upload files. Currently you can", "'name', 'type': 'str'}, 'source': {'key': 'source', 'type': 'str'}, 'condition': {'key':", "{ 'failover_region': {'required': True}, } _attribute_map = { 'failover_region': {'key':", "resource: Resource Type: IotHubs. :vartype resource: str :ivar operation: Name", "expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload. :type ttl_as_iso8601:", ":type container_name: str \"\"\" _validation = { 'connection_string': {'required': True},", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(EventHubConsumerGroupsListResult,", "JSON-serialized leaf certificate. :param certificate: base-64 representation of X509 certificate", "__init__( self, **kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "$default. Endpoint names must be unique across endpoint types. 
:type", "that the endpoint is not accepting messages, after IoT Hub", "bus queue endpoint types. All required parameters must be populated", "= { 'code': {'key': 'code', 'type': 'str'}, 'http_status_code': {'key': 'httpStatusCode',", "{'key': 'subject', 'type': 'str'}, 'expiry': {'key': 'expiry', 'type': 'rfc-1123'}, 'thumbprint':", "{'key': 'type', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'failure_reason':", "'batch_frequency_in_seconds': {'maximum': 720, 'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum': 10485760},", "**kwargs ): super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies', None) self.ip_filter_rules =", "\"\"\" _validation = { 'minimum': {'readonly': True, 'maximum': 1, 'minimum':", "value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype next_link: str \"\"\" _validation =", "'value', 'type': '[CertificateDescription]'}, } def __init__( self, **kwargs ): super(CertificateListDescription,", "'str'}, } def __init__( self, **kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties", "= None self.etag = None self.type = None class CloudToDeviceProperties(msrest.serialization.Model):", "for file upload is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601:", "self).__init__(**kwargs) self.provider = None self.resource = None self.operation = None", "{'readonly': True}, } _attribute_map = { 'retention_time_in_days': {'key': 'retentionTimeInDays', 'type':", "should be between 10485760(10MB) and 524288000(500MB). Default value is 314572800(300MB).", "request. :ivar code: The error code. :vartype code: str :ivar", "as expected. The 'unhealthy' status shows that the endpoint is", "the certificate. 
:vartype name: str :ivar etag: The entity tag.", "} _attribute_map = { 'value': {'key': 'value', 'type': '[UserSubscriptionQuota]'}, 'next_link':", "serialize messages to blobs. Supported values are 'avro', 'avrodeflate', and", "twin: Routing Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map =", "is_enabled: Required. Used to specify whether the fallback route is", "self.next_link = None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties of an enrichment", "None class EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of Event Hub-compatible consumer", "{'readonly': True}, } _attribute_map = { 'authorization_policies': {'key': 'authorizationPolicies', 'type':", "for which a message is available to consume before it", "to which your IoT hub routes messages based on the", "queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type default_ttl_as_iso8601: ~datetime.timedelta :param feedback: The", "are routed. Currently only one endpoint is allowed. :type endpoint_names:", "primary_key: The primary key. :type primary_key: str :param secondary_key: The", "endpoint. :type connection_string: str :param name: Required. The name that", "delivered to endpoints. All required parameters must be populated in", "{'key': 'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id': {'key': 'id', 'type': 'str'}, 'name':", "{'key': 'reported', 'type': 'object'}, } def __init__( self, **kwargs ):", "send to Azure. :param input_blob_container_uri: Required. The input blob container", "topic endpoint types. All required parameters must be populated in", "is allowed across all endpoint types for free hubs. 
:type", "the next set of operation list results if there are", "{'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, }", "def __init__( self, **kwargs ): super(MessagingEndpointProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601',", ":param location: Required. The resource location. :type location: str :param", "Column where the route error happened. :type column: int \"\"\"", "request. :param properties: The description of an X509 CA Certificate", "_attribute_map = { 'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key':", "'containerName', 'type': 'str'}, 'file_name_format': {'key': 'fileNameFormat', 'type': 'str'}, 'batch_frequency_in_seconds': {'key':", "'etag': {'key': 'etag', 'type': 'str'}, } def __init__( self, **kwargs", "None self.updated = None self.verification_code = None self.certificate = None", "message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required. Route properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties", "{'key': 'filterName', 'type': 'str'}, 'action': {'key': 'action', 'type': 'str'}, 'ip_mask':", "None self.location = kwargs['location'] self.tags = kwargs.get('tags', None) class IotHubDescription(Resource):", "The description of an X509 CA Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties", ":vartype tier: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuTier :param capacity: The number of", "storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints: The messaging endpoint properties for", "of JobResponse objects with a next link. 
Variables are only", "'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateProperties, self).__init__(**kwargs)", "): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.primary_key = kwargs.get('primary_key', None)", "self.event_hubs = kwargs.get('event_hubs', None) self.storage_containers = kwargs.get('storage_containers', None) class RoutingEventHubProperties(msrest.serialization.Model):", "any. :vartype parent_job_id: str \"\"\" _validation = { 'job_id': {'readonly':", "parameters must be populated in order to send to Azure.", "X509 certificate .cer file or just .pem file content. :type", "_validation = { 'id': {'readonly': True}, 'name': {'readonly': True, 'pattern':", ":param lock_duration_as_iso8601: The lock duration for the feedback queue. See:", "True}, } _attribute_map = { 'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type': 'str'},", "{ 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, 'reason': {'key': 'reason', 'type':", "self.feedback = kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The health data for", "import of all devices in the hub. All required parameters", "array of matched routes. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map =", "} def __init__( self, **kwargs ): super(RouteCompilationError, self).__init__(**kwargs) self.message =", "endpoint_id: Id of the endpoint. :type endpoint_id: str :param health_status:", "'type': '[IotHubDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "Required. The name of the route. 
The name can only", "{'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type':", "{'key': 'sasTtlAsIso8601', 'type': 'duration'}, 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'container_name':", "region and also the region where the IoT hub can", "'unhealthy' status shows that the endpoint is not accepting messages", "): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id = kwargs.get('id', None) self.type = kwargs.get('type',", "_attribute_map = { 'properties': {'key': 'properties', 'type': '{str}'}, 'id': {'key':", ":ivar provider: Service provider: Microsoft Devices. :vartype provider: str :ivar", "hub endpoint. :type connection_string: str :param name: Required. The name", "= kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of the fallback route.", "{ 'resource_type': {'readonly': True}, 'sku': {'required': True}, 'capacity': {'required': True},", "The current value for the quota metric. :vartype current_value: long", "self, **kwargs ): super(Name, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.localized_value", "_attribute_map = { 'filter_name': {'key': 'filterName', 'type': 'str'}, 'action': {'key':", "status shows that the endpoint is accepting messages as expected.", "kwargs.get('certificate', None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of an X509 CA", "For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names: Required.", "for proof of possession. 
:vartype verification_code: str :ivar certificate: The", ":type endpoint_id: str :param health_status: Health statuses have following meanings.", "'healthy' status shows that the endpoint is accepting messages as", "= kwargs.get('value', None) self.next_link = None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties.", "= None self.endpoint = None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide", "class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired: Twin desired properties. :type desired:", "container holding only the Tags for a resource, allowing the", "\"\"\" _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, }", "\"\"\"The properties related to service bus queue endpoint types. All", "} _attribute_map = { 'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link':", "ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\". :type", "= kwargs.get('properties', None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired: Twin desired", "super(CertificateListDescription, self).__init__(**kwargs) self.value = kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model): \"\"\"The description", "'type': 'int'}, 'column': {'key': 'column', 'type': 'int'}, } def __init__(", ":param name: Required. The name that identifies this endpoint. The", "'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type': 'duration'}, 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'},", "routing rules are allowed for free hubs. :type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties]", "by Microsoft (R) AutoRest Code Generator. 
# Changes may cause", ":param sas_ttl_as_iso8601: The period of time for which the SAS", "= { 'value': {'key': 'value', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink',", "self.id = kwargs.get('id', None) self.type = kwargs.get('type', None) self.unit =", "): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "status: The status of the job. Possible values include: \"unknown\",", "'str'}, 'severity': {'key': 'severity', 'type': 'str'}, 'location': {'key': 'location', 'type':", "'str'}, } def __init__( self, **kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value", "'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'}, } def __init__( self, **kwargs", ":param limit: Numerical limit on IotHub type. :type limit: int", "= None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of one of the", "'str'}, } def __init__( self, **kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value", "to send to Azure. :param connection_string: Required. The connection string", "str \"\"\" _validation = { 'name': {'required': True}, } _attribute_map", "units. :vartype maximum: long :ivar default: The default number of", "{ 'provider': {'readonly': True}, 'resource': {'readonly': True}, 'operation': {'readonly': True},", "Possible values include: \"unknown\", \"export\", \"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\",", "provisioning_state: str :ivar state: The hub state. :vartype state: str", "twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map = { 'routing_source': {'key': 'routingSource', 'type':", "'str'}, 'http_status_code': {'key': 'httpStatusCode', 'type': 'str'}, 'message': {'key': 'message', 'type':", "whether a route is enabled. 
:type is_enabled: bool \"\"\" _validation", "'{str}'}, 'system_properties': {'key': 'systemProperties', 'type': '{str}'}, } def __init__( self,", "properties: The tags. :type properties: dict[str, str] :ivar id: The", "for the job. :vartype status_message: str :ivar parent_job_id: The job", "= None self.http_status_code = None self.message = None self.details =", "updated: ~datetime.datetime :ivar verification_code: The certificate's verification code that will", "\"unknown\", \"export\", \"import\", \"backup\", \"readDeviceProperties\", \"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\".", "to Azure. :param key_name: Required. The name of the shared", "**kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name']", "to be applied to, such as DeviceMessages. Possible values include:", "{ 'value': {'key': 'value', 'type': '[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink', 'type':", "endpoint is not accepting messages, after IoT Hub retried sending", "convention. :type etag: str :param properties: IotHub properties. :type properties:", "endpoint_names: Required. The list of endpoints to which messages that", "and monitor issues with endpoints. The 'unknown' status shows that", "to true by default. For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition:", "default. For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. 
:type condition: str :param endpoint_names:", "def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "'type': 'IotHubCapacity'}, } def __init__( self, **kwargs ): super(IotHubSkuDescription, self).__init__(**kwargs)", "'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, } def __init__( self, **kwargs", "\"Automatic\", \"Manual\", \"None\". :vartype scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\" _validation", "when the enableFileUploadNotifications property is set to True, causes an", "} def __init__( self, **kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties =", "resource. Variables are only populated by the server, and will", "hub name is available. Variables are only populated by the", "hub routes the messages to, based on the routing rules.", "error happened. :type line: int :param column: Column where the", "kwargs['rights'] class SharedAccessSignatureAuthorizationRuleListResult(msrest.serialization.Model): \"\"\"The list of shared access policies with", "and only 1 custom endpoint is allowed across all endpoint", "also the region where the IoT hub can failover to.", ":vartype verification_code: str :ivar certificate: The certificate content. :vartype certificate:", "{'readonly': True}, 'type': {'readonly': True}, } _attribute_map = { 'properties':", "'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'certificate': {'key':", "'details': {'readonly': True}, } _attribute_map = { 'code': {'key': 'code',", "to which the routing rule is to be applied to.", "result of testing route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\" _attribute_map =", "= kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route errors. :param", "to messages delivered to endpoints. 
All required parameters must be", "Primary and secondary location for iot hub. :vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription]", "whether certificate has been verified. :vartype is_verified: bool :ivar created:", "{'readonly': True}, 'created': {'readonly': True}, 'updated': {'readonly': True}, 'verification_code': {'readonly':", "self, **kwargs ): super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject = None self.expiry =", "accepting messages as expected and IoT Hub is retrying to", "= { 'code': {'readonly': True}, 'http_status_code': {'readonly': True}, 'message': {'readonly':", "error happened. :type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map = { 'message':", "kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related to the custom endpoints", "service_bus_queues: The list of Service Bus queue endpoints that IoT", "storage_containers: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerProperties] \"\"\" _attribute_map = { 'service_bus_queues': {'key': 'serviceBusQueues', 'type':", "'type': 'str'}, 'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__(", "{'key': 'routes', 'type': '[RouteProperties]'}, 'fallback_route': {'key': 'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments':", "will be ignored when sending a request. :ivar total_device_count: The", "{'key': 'route', 'type': 'RouteProperties'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, }", "self.next_link = None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of one of", ":param key_name: Required. The name of the shared access policy.", "sending a request. :ivar name: The name of the quota", "array of IotHubDescription objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The", "a request. 
:param value: The array of IotHubDescription objects. :type", "str :param sku: Required. The type of the resource. :type", "{'readonly': True}, 'http_status_code': {'readonly': True}, 'message': {'readonly': True}, 'details': {'readonly':", "of the resource group of the service bus topic endpoint.", "'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'verification_code': {'key': 'verificationCode',", "'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag',", "desired action for requests captured by this rule. Possible values", "the resource type. :vartype type: str :ivar etag: The etag.", "number of IotHub type. :type current_value: int :param limit: Numerical", ":type comments: str :param features: The capabilities and features enabled", "def __init__( self, **kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count',", "__init__( self, **kwargs ): super(CertificateListDescription, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "of the job. Possible values include: \"unknown\", \"enqueued\", \"running\", \"completed\",", "= kwargs['name'] self.tier = None self.capacity = kwargs.get('capacity', None) class", "The connection string of the service bus queue endpoint. :type", "'type': 'str'}, } def __init__( self, **kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs)", "name need not be the same as the actual topic", "the X509 leaf certificate .cer file or just .pem file", "self, **kwargs ): super(IotHubDescription, self).__init__(**kwargs) self.etag = kwargs.get('etag', None) self.properties", "of Azure Storage endpoints where you can upload files. Currently", "need not be the same as the actual queue name.", "routed to. Currently only 1 endpoint is allowed. :type endpoint_names:", ":ivar code: The error code. :vartype code: str :ivar http_status_code:", ":param properties: The tags. 
:type properties: dict[str, str] :ivar id:", "_attribute_map = { 'value': {'key': 'value', 'type': '[JobResponse]'}, 'next_link': {'key':", "endpoints that the IoT hub routes the messages to, based", "related to service bus topic endpoint types. All required parameters", "IotHub properties. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required. IotHub SKU", "True}, 'details': {'readonly': True}, } _attribute_map = { 'code': {'key':", "The 'healthy' status shows that the endpoint is accepting messages", "IotHubSkuDescription. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The next link. :vartype", ":type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The list of storage container", "): super(CertificateListDescription, self).__init__(**kwargs) self.value = kwargs.get('value', None) class CertificateProperties(msrest.serialization.Model): \"\"\"The", "'type', 'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateWithNonceDescription,", "__init__( self, **kwargs ): super(CertificateWithNonceDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None)", "service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The list of Service Bus topic", "specifying a value for this property when the enableFileUploadNotifications property", "is expired by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type", "to or rejected from this endpoint. Possible values include: \"unknown\",", "str :param value: Required. 
The value for the enrichment property.", "= kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model): \"\"\"The object that represents the", "{'key': 'secondaryKey', 'type': 'str'}, 'rights': {'key': 'rights', 'type': 'str'}, }", "self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate.", "= kwargs.get('default_ttl_as_iso8601', None) self.feedback = kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The", "routes messages to, based on the routing rules. This list", "when sending a request. :param value: :type value: list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar", "for the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to-device-messages. :type lock_duration_as_iso8601: ~datetime.timedelta :param", "region where the IoT hub can failover to. Possible values", "= { 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601',", "name of the host. :vartype host_name: str :param event_hub_endpoints: The", "= kwargs['key'] self.value = kwargs['value'] self.endpoint_names = kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model):", "None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the route error happened. :param", "} _attribute_map = { 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'}, 'default_ttl_as_iso8601':", "messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. 
:type default_ttl_as_iso8601:", "= kwargs.get('value', None) self.next_link = None class EnrichmentProperties(msrest.serialization.Model): \"\"\"The properties", "**kwargs ): super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link = None", "= kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing route. All", ":param storage_endpoints: The list of Azure Storage endpoints where you", "'next_link': {'readonly': True}, } _attribute_map = { 'value': {'key': 'value',", "a hub. All required parameters must be populated in order", "The IP filter rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The", "of the conditions specified in the 'routes' section get routed", "condition: The condition that is evaluated to apply the routing", "optional parameter. :param tags: A set of tags. Twin Tags.", "None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are only populated", "Response object. Variables are only populated by the server, and", "list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusTopicEndpointProperties] :param event_hubs: The list of Event Hubs endpoints that", "failed, this string containing the reason for the failure. :vartype", "kwargs.get('name', None) self.source = kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names", "when evaluating route. :param message: Route error message. :type message:", "'str'}, 'operation': {'key': 'operation', 'type': 'str'}, 'description': {'key': 'description', 'type':", "connection to the IoT hub. :type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules:", "is not accepting messages as expected and IoT Hub is", "etag: str :ivar type: The resource type. 
:vartype type: str", "_attribute_map = { 'body': {'key': 'body', 'type': 'str'}, 'app_properties': {'key':", "have its key as $default. Specifying more than one storage", "'key', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, 'endpoint_names': {'key':", "Event Hub-compatible consumer group names with a next link. Variables", "super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id =", "self.tags = kwargs.get('tags', None) class IotHubDescription(Resource): \"\"\"The description of the", "the operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation = { 'name':", "provide failover region when requesting manual Failover for a hub.", "'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def __init__( self,", "= { 'name': {'key': 'name', 'type': 'str'}, 'current_value': {'key': 'currentValue',", "by the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#cloud-to-device-messages. :type ttl_as_iso8601: ~datetime.timedelta", "or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message: The detailed reason message. :type message:", "a maximum length of 64 characters. The following names are", "'limit': {'key': 'limit', 'type': 'int'}, 'name': {'key': 'name', 'type': 'Name'},", "__init__( self, **kwargs ): super(Operation, self).__init__(**kwargs) self.name = None self.display", "{'key': 'action', 'type': 'str'}, 'ip_mask': {'key': 'ipMask', 'type': 'str'}, }", "of an X509 CA Certificate including the challenge nonce issued", "which indicates whether the provided name is available. :vartype name_available:", "to send to Azure. :param sas_ttl_as_iso8601: The period of time", "= None self.type = None self.etag = None class EventHubConsumerGroupsListResult(msrest.serialization.Model):", "error code. 
:vartype code: str :ivar http_status_code: The HTTP status", "'name': {'key': 'name', 'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},", "is 314572800(300MB). :type max_chunk_size_in_bytes: int :param encoding: Encoding that is", "): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id = None", "hub attempts to deliver a message on the feedback queue.", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under", "None) self.resource_group = kwargs.get('resource_group', None) self.container_name = kwargs['container_name'] self.file_name_format =", "self).__init__(**kwargs) self.etag = kwargs.get('etag', None) self.properties = kwargs.get('properties', None) self.sku", "'container_name': {'required': True}, 'batch_frequency_in_seconds': {'maximum': 720, 'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum':", "currently provisioned. The secondary region is the Azure disaster recovery", "kwargs.get('unit', None) self.current_value = kwargs.get('current_value', None) self.limit = kwargs.get('limit', None)", "= { 'connection_string': {'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'},", "Service Bus queue endpoints that IoT hub routes the messages", ":type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required. IotHub capacity. :type capacity:", "The list of Azure Storage endpoints where you can upload", "one of the locations where a resource is provisioned. :param", "of the service bus topic endpoint. :type connection_string: str :param", "'avrodeflate', and 'JSON'. Default value is 'avro'. Possible values include:", "the connectionString specified. :type container_name: str \"\"\" _validation = {", "to a storage container endpoint. All required parameters must be", "be ignored when sending a request. 
:param value: The array", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[Operation]'},", ":param line: Line where the route error happened. :type line:", "details: Detailed result of testing route. :type details: ~azure.mgmt.iothub.v2019_11_04.models.TestRouteResultDetails \"\"\"", "'type': '{str}'}, } def __init__( self, **kwargs ): super(Resource, self).__init__(**kwargs)", "when it routes messages to the fallback endpoint. All required", "properties. :type reported: object \"\"\" _attribute_map = { 'desired': {'key':", "class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input parameter. This is an optional", "representation of the X509 leaf certificate .cer file or just", "type id. :type id: str :param type: Response type. :type", "be populated in order to send to Azure. :param key:", "of the IP filter rule. :type filter_name: str :param action:", "of the storage account. :type connection_string: str :param name: Required.", "True}, } _attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'},", "{ 'filter_name': {'key': 'filterName', 'type': 'str'}, 'action': {'key': 'action', 'type':", "= kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled", "kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubSkuDescription objects with", "If status == failed, this string containing the reason for", "\"\"\" _validation = { 'name': {'readonly': True}, } _attribute_map =", "sending a request. :ivar name_available: The value which indicates whether", "The status message for the job. :vartype status_message: str :ivar", "A set of tags. Twin Tags. 
:type tags: object :param", "**kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model):", "{'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit':", "self.capacity = kwargs.get('capacity', None) class IpFilterRule(msrest.serialization.Model): \"\"\"The IP filter rules", "built-in eventhub endpoint. :type fallback_route: ~azure.mgmt.iothub.v2019_11_04.models.FallbackRouteProperties :param enrichments: The list", "'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, } def __init__( self, **kwargs", "the operation. Variables are only populated by the server, and", "_validation = { 'key': {'required': True}, 'value': {'required': True}, 'endpoint_names':", "class Operation(msrest.serialization.Model): \"\"\"IoT Hub REST API operation. Variables are only", "IotHub type. :type limit: int :param name: IotHub type. :type", "The certificate content. :vartype certificate: str \"\"\" _validation = {", "filter rule. :type filter_name: str :param action: Required. The desired", "kwargs.get('reported', None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of an IoT hub", "Required. IotHub SKU info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation =", "= { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly':", "a connection to the IoT hub. :type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param", "Azure disaster recovery (DR) paired region and also the region", "'type': 'str'}, 'health_status': {'key': 'healthStatus', 'type': 'str'}, } def __init__(", "_attribute_map = { 'desired': {'key': 'desired', 'type': 'object'}, 'reported': {'key':", "The error code. 
:vartype code: str :ivar http_status_code: The HTTP", "'str'}, } def __init__( self, **kwargs ): super(IpFilterRule, self).__init__(**kwargs) self.filter_name", "of units. :vartype default: long :ivar scale_type: The type of", "'duration'}, 'feedback': {'key': 'feedback', 'type': 'FeedbackProperties'}, } def __init__( self,", ":vartype name_available: bool :ivar reason: The reason for unavailability. Possible", "IP filter rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The provisioning", "will be ignored when sending a request. :param retention_time_in_days: The", "bool :ivar created: The certificate's create date and time. :vartype", "include: \"Avro\", \"AvroDeflate\", \"JSON\". :type encoding: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\"", "properties of the messaging endpoints used by this IoT hub.", "names with a next link. Variables are only populated by", "errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map = { 'compilation_errors': {'key':", "for cloud-to-device messages in the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages.", "\"\"\"Input for testing all routes. :param routing_source: Routing source. Possible", "status message for the job. :vartype status_message: str :ivar parent_job_id:", "monitor issues with endpoints. The 'unknown' status shows that the", "of the event hub endpoint. :type subscription_id: str :param resource_group:", "'endpointNames', 'type': '[str]'}, } def __init__( self, **kwargs ): super(EnrichmentProperties,", "identifies this endpoint. The name can only include alphanumeric characters,", "for requests captured by this rule. Possible values include: \"Accept\",", ":type id: str :param type: Response type. 
:type type: str", "location: str :param role: The role of the region, can", "Licensed under the MIT License. See License.txt in the project", "The shared access policies you can use to secure a", "condition is provided, it evaluates to true by default. For", "None) self.connection_string = kwargs['connection_string'] self.container_name = kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A", "it will evaluate to true by default. For grammar, See:", ":param comments: IoT hub comments. :type comments: str :param features:", "resource location. :type location: str :param tags: A set of", ":param message: Routing message. :type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param twin: Routing", "524288000, 'minimum': 10485760}, } _attribute_map = { 'connection_string': {'key': 'connectionString',", ":ivar type: the resource type. :vartype type: str :ivar etag:", "self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use", "'value', 'type': '[IotHubDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "type. :param value: IotHub type. :type value: str :param localized_value:", "requesting an import of all devices in the hub. All", "Unit of IotHub type. :type unit: str :param current_value: Current", "IoT Hub for file upload is valid. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options.", "'current_value': {'readonly': True}, 'max_value': {'readonly': True}, } _attribute_map = {", "{'required': True}, 'output_blob_container_uri': {'required': True}, } _attribute_map = { 'input_blob_container_uri':", "to be thrown. Not specifying a value for this property", "hubs. 
:type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The properties of the", "self.certificate = None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf certificate. :param", "kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group', None) self.container_name", "None) class Operation(msrest.serialization.Model): \"\"\"IoT Hub REST API operation. Variables are", "\"\"\"The routing related properties of the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging.", ":ivar total_device_count: The total count of devices in the identity", "{'key': 'currentValue', 'type': 'int'}, 'limit': {'key': 'limit', 'type': 'int'}, 'name':", "must be populated in order to send to Azure. :param", "set of tags. Twin Tags. :type tags: object :param properties:", "the normal ETag convention. :type etag: str :param properties: IotHub", "\"\"\" _validation = { 'connection_string': {'required': True}, 'name': {'required': True,", "endpoints to which the messages that satisfy the condition are", "'type': '[Operation]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "true by default. For grammar, See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(JobResponseListResult,", "Etag field is *not* required. If it is provided in", "{'key': 'end', 'type': 'RouteErrorPosition'}, } def __init__( self, **kwargs ):", "capacity: The number of provisioned IoT Hub units. 
See: https://docs.microsoft.com/azure/azure-subscription-service-limits#iot-hub-limits.", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[JobResponse]'},", "'str'}, 'host_name': {'key': 'hostName', 'type': 'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type':", "'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type':", "rules. :type ip_filter_rules: list[~azure.mgmt.iothub.v2019_11_04.models.IpFilterRule] :ivar provisioning_state: The provisioning state. :vartype", "self).__init__(**kwargs) self.name = kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result of the request", "server, and will be ignored when sending a request. :param", "None self.enabled_device_count = None self.disabled_device_count = None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation", "def __init__( self, **kwargs ): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues',", "The max delivery count for cloud-to-device messages in the device", "Currently you can configure only one Azure Storage account and", "{ 'desired': {'key': 'desired', 'type': 'object'}, 'reported': {'key': 'reported', 'type':", "_attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'current_value': {'key':", "filter rules for the IoT hub. All required parameters must", "Possible values include: \"Avro\", \"AvroDeflate\", \"JSON\". :type encoding: str or", "\"\"\"The properties of the provisioned Event Hub-compatible endpoint used by", "list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation = { 'provisioning_state': {'readonly': True}, 'state': {'readonly':", "def __init__( self, **kwargs ): super(RouteErrorRange, self).__init__(**kwargs) self.start = kwargs.get('start',", "tags. The resource tags. 
:type tags: dict[str, str] \"\"\" _validation", "self.is_verified = None self.created = None self.updated = None self.verification_code", "'type': 'bool'}, } def __init__( self, **kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs)", "id: IotHub type id. :type id: str :param type: Response", "**kwargs ): super(FailoverInput, self).__init__(**kwargs) self.failover_region = kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The", ":type input_blob_container_uri: str :param output_blob_container_uri: Required. The output blob container", "self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None) self.feedback", "= { 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource',", "self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies', None) self.ip_filter_rules = kwargs.get('ip_filter_rules', None) self.provisioning_state", "be ignored when sending a request. :param properties: The description", "'action', 'type': 'str'}, 'ip_mask': {'key': 'ipMask', 'type': 'str'}, } def", "be ignored when sending a request. :ivar name: Operation name:", "the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. 
:type default_ttl_as_iso8601: ~datetime.timedelta :param", "'routingSource', 'type': 'str'}, 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'twin': {'key':", "super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.primary_key = kwargs.get('primary_key', None) self.secondary_key", "None) self.column = kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route", "_attribute_map = { 'value': {'key': 'value', 'type': '[Operation]'}, 'next_link': {'key':", "'Name'}, } def __init__( self, **kwargs ): super(UserSubscriptionQuota, self).__init__(**kwargs) self.id", ":param app_properties: App properties. :type app_properties: dict[str, str] :param system_properties:", "tags: A set of tags. Resource tags. :type tags: dict[str,", "~datetime.datetime :ivar verification_code: The certificate's verification code that will be", "verification_code: str :ivar certificate: The certificate content. :vartype certificate: str", "{'key': 'statusMessage', 'type': 'str'}, 'parent_job_id': {'key': 'parentJobId', 'type': 'str'}, }", "None) self.next_link = None class EventHubProperties(msrest.serialization.Model): \"\"\"The properties of the", "True}, 'action': {'required': True}, 'ip_mask': {'required': True}, } _attribute_map =", ":type current_value: int :param limit: Numerical limit on IotHub type.", "{ 'service_bus_queues': {'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics', 'type':", "\"DeviceManagement\". 
:type features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary and", "{'key': 'currentValue', 'type': 'long'}, 'max_value': {'key': 'maxValue', 'type': 'long'}, }", "self.next_link = None class EventHubProperties(msrest.serialization.Model): \"\"\"The properties of the provisioned", "for which the SAS URI generated by IoT Hub for", "self.location = kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the route", "storage account. :type connection_string: str :param name: Required. The name", "endpoints: The properties related to the custom endpoints to which", "for iot hub. :vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation = {", "= kwargs['connection_string'] self.container_name = kwargs['container_name'] class TagsResource(msrest.serialization.Model): \"\"\"A container holding", "upload files. The container need not exist but should be", "{ 'value': {'readonly': True}, 'next_link': {'readonly': True}, } _attribute_map =", "of the service bus topic endpoint. :type resource_group: str \"\"\"", "sending a request. :ivar name: Operation name: {provider}/{resource}/{read | write", "certificate .cer file or just .pem file content. :type certificate:", "self, **kwargs ): super(Operation, self).__init__(**kwargs) self.name = None self.display =", "ignored when sending a request. :param properties: The tags. :type", "'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'int'}, 'limit': {'key':", "endpoints to which messages that satisfy the condition are routed.", "self).__init__(**kwargs) self.id = kwargs.get('id', None) self.type = kwargs.get('type', None) self.unit", "of an IoT hub. Variables are only populated by the", "kwargs['output_blob_container_uri'] class IotHubCapacity(msrest.serialization.Model): \"\"\"IoT Hub capacity information. Variables are only", "ignored when sending a request. 
:ivar job_id: The job identifier.", "a message on the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type", ":type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map = { 'routing_source': {'key': 'routingSource',", "address range in CIDR notation for the rule. :type ip_mask:", "types. All required parameters must be populated in order to", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\" _attribute_map = { 'value': {'key': 'value',", "def __init__( self, **kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string']", "applies to messages to be delivered to built-in and custom", "message. :type body: str :param app_properties: App properties. :type app_properties:", "__init__( self, **kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None)", "queue endpoint. :type subscription_id: str :param resource_group: The name of", "list[~azure.mgmt.iothub.v2019_11_04.models.UserSubscriptionQuota] :ivar next_link: :vartype next_link: str \"\"\" _validation = {", "{'key': 'failoverRegion', 'type': 'str'}, } def __init__( self, **kwargs ):", "order to send to Azure. :param export_blob_container_uri: Required. The export", "\"\"\" _validation = { 'subject': {'readonly': True}, 'expiry': {'readonly': True},", "**kwargs ): super(OperationInputs, self).__init__(**kwargs) self.name = kwargs['name'] class OperationListResult(msrest.serialization.Model): \"\"\"Result", "lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period of time for which", "the challenge nonce issued for the Proof-Of-Possession flow. :type properties:", "{'readonly': True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True},", "service bus queue endpoint. 
:type resource_group: str \"\"\" _validation =", "): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id", "is an optional parameter. :param tags: A set of tags.", "when sending a request. :ivar name_available: The value which indicates", "{ 'value': {'key': 'value', 'type': '[Operation]'}, 'next_link': {'key': 'nextLink', 'type':", "1}, } _attribute_map = { 'max_delivery_count': {'key': 'maxDeliveryCount', 'type': 'int'},", "{ 'connection_string': {'required': True}, 'container_name': {'required': True}, } _attribute_map =", "{'required': True}, } _attribute_map = { 'message': {'key': 'message', 'type':", "'type': 'str'}, } def __init__( self, **kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs)", "The job identifier. :vartype job_id: str :ivar start_time_utc: The start", "User subscription quota response. Variables are only populated by the", "name can only include alphanumeric characters, periods, underscores, hyphens, has", "'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'etag': {'key': 'etag',", "str \"\"\" _validation = { 'connection_string': {'required': True}, 'name': {'required':", "server, and will be ignored when sending a request. All", "order to send to Azure. :param message: Routing message. :type", ":type enable_file_upload_notifications: bool :param cloud_to_device: The IoT hub cloud-to-device messaging", "apply the routing rule. If no condition is provided, it", "super(Name, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.localized_value = kwargs.get('localized_value', None)", "self.end_time_utc = None self.type = None self.status = None self.failure_reason", "self.expiry = None self.thumbprint = None self.is_verified = None self.created", "detailed reason message. 
:type message: str \"\"\" _validation = {", "'[RoutingStorageContainerProperties]'}, } def __init__( self, **kwargs ): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues", "314572800(300MB). :type max_chunk_size_in_bytes: int :param encoding: Encoding that is used", "'subject': {'key': 'subject', 'type': 'str'}, 'expiry': {'key': 'expiry', 'type': 'rfc-1123'},", "\"\"\"IoT Hub REST API operation. Variables are only populated by", "'container_name': {'required': True}, } _attribute_map = { 'sas_ttl_as_iso8601': {'key': 'sasTtlAsIso8601',", "self.severity = kwargs.get('severity', None) self.location = kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model):", "string of the storage account. :type connection_string: str :param name:", "{'required': True}, } _attribute_map = { 'filter_name': {'key': 'filterName', 'type':", "between 10485760(10MB) and 524288000(500MB). Default value is 314572800(300MB). :type max_chunk_size_in_bytes:", "a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int \"\"\" _validation =", "that IoT hub routes messages to, based on the routing", "information. Variables are only populated by the server, and will", "None self.disabled_device_count = None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when evaluating", "Possible values include: \"Automatic\", \"Manual\", \"None\". :vartype scale_type: str or", "list[~azure.mgmt.iothub.v2019_11_04.models.MatchedRoute] \"\"\" _attribute_map = { 'routes': {'key': 'routes', 'type': '[MatchedRoute]'},", "testing route. Possible values include: \"undefined\", \"false\", \"true\". :type result:", ":param reported: Twin desired properties. 
:type reported: object \"\"\" _attribute_map", "'type': 'str'}, } def __init__( self, **kwargs ): super(ErrorDetails, self).__init__(**kwargs)", "list of endpoints for which the enrichment is applied to", "deliver a message on the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages.", "indicating whether keys should be excluded during export. :type exclude_keys:", "): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id", "): super(StorageEndpointProperties, self).__init__(**kwargs) self.sas_ttl_as_iso8601 = kwargs.get('sas_ttl_as_iso8601', None) self.connection_string = kwargs['connection_string']", "{'readonly': True}, 'etag': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map", "'status_message': {'readonly': True}, 'parent_job_id': {'readonly': True}, } _attribute_map = {", "identifier. :vartype id: str :ivar name: The Event Hub-compatible consumer", "{ 'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__( self,", "endpoint types for free hubs. :param service_bus_queues: The list of", "the service bus queue endpoint. :type connection_string: str :param name:", ":vartype thumbprint: str :ivar is_verified: Determines whether certificate has been", "def __init__( self, **kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "{ 'properties': {'key': 'properties', 'type': 'RouteProperties'}, } def __init__( self,", "The name need not be the same as the actual", "in the hub. All required parameters must be populated in", "capacity: Required. IotHub capacity. 
:type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation =", "None) self.properties = kwargs.get('properties', None) self.sku = kwargs['sku'] class IotHubDescriptionListResult(msrest.serialization.Model):", "feedback: The properties of the feedback queue for cloud-to-device messages.", "{'key': 'path', 'type': 'str'}, 'endpoint': {'key': 'endpoint', 'type': 'str'}, }", "the IoT hub to check. :type name: str \"\"\" _validation", "to healthy when IoT Hub has established an eventually consistent", "True, 'max_items': 1, 'min_items': 1}, 'is_enabled': {'required': True}, } _attribute_map", "'routing_source': {'key': 'routingSource', 'type': 'str'}, 'message': {'key': 'message', 'type': 'RoutingMessage'},", "delete}. :vartype name: str :param display: The object that represents", ":param batch_frequency_in_seconds: Time interval at which blobs are written to", "Required. The list of endpoints to which the messages that", "that matched. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map = { 'properties':", "in order to send to Azure. :param filter_name: Required. The", "{ 'minimum': {'readonly': True, 'maximum': 1, 'minimum': 1}, 'maximum': {'readonly':", "devices in the IoT hub. All required parameters must be", "self.id = None self.name = None self.etag = None self.type", "resource is provisioned. :param location: The name of the Azure", "self).__init__(**kwargs) self.filter_name = kwargs['filter_name'] self.action = kwargs['action'] self.ip_mask = kwargs['ip_mask']", "queue for cloud-to-device messages. :type feedback: ~azure.mgmt.iothub.v2019_11_04.models.FeedbackProperties \"\"\" _validation =", "the shared access policy. :type key_name: str :param primary_key: The", "enable_file_upload_notifications: bool :param cloud_to_device: The IoT hub cloud-to-device messaging properties.", "CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized array of Certificate objects. 
:param value: The", "'str'}, } def __init__( self, **kwargs ): super(JobResponse, self).__init__(**kwargs) self.job_id", "with endpoints. The 'unknown' status shows that the IoT Hub", "of the SKU. Possible values include: \"F1\", \"S1\", \"S2\", \"S3\",", "DeviceMessages. Possible values include: \"Invalid\", \"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type", "None self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None) self.routing = kwargs.get('routing', None) self.storage_endpoints", "'name', 'type': 'str'}, } def __init__( self, **kwargs ): super(OperationInputs,", "None) self.location = kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the", "= kwargs.get('name', None) self.source = kwargs['source'] self.condition = kwargs.get('condition', None)", "and features enabled for the IoT hub. Possible values include:", "self.service_bus_topics = kwargs.get('service_bus_topics', None) self.event_hubs = kwargs.get('event_hubs', None) self.storage_containers =", "manual Failover for a hub. All required parameters must be", "\"\"\"Input values. All required parameters must be populated in order", ":param result: Result of testing route. Possible values include: \"undefined\",", "self.route = kwargs['route'] self.twin = kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model): \"\"\"Result", "'feedback', 'type': 'FeedbackProperties'}, } def __init__( self, **kwargs ): super(CloudToDeviceProperties,", "Job Response object. Variables are only populated by the server,", "self.value = kwargs.get('value', None) self.next_link = None class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU", ":type tags: object :param properties: :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwinProperties \"\"\" _attribute_map", "region is where the IoT hub is currently provisioned. 
The", "string for the Azure Storage account to which files are", "self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id = kwargs.get('subscription_id',", "self.path = None self.endpoint = None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to", "class RoutingProperties(msrest.serialization.Model): \"\"\"The routing related properties of the IoT hub.", "class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties. Variables are only populated by", ":type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation = { 'route': {'required': True},", ":param current_value: Current number of IotHub type. :type current_value: int", "True}, 'certificate': {'readonly': True}, } _attribute_map = { 'subject': {'key':", "to Azure. :param filter_name: Required. The name of the IP", "True}, 'current_value': {'readonly': True}, 'max_value': {'readonly': True}, } _attribute_map =", "to Azure. :ivar resource_type: The type of the resource. :vartype", "kwargs.get('max_delivery_count', None) class ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting", "): super(OperationDisplay, self).__init__(**kwargs) self.provider = None self.resource = None self.operation", "} def __init__( self, **kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value =", "'str'}, 'message': {'key': 'message', 'type': 'str'}, } def __init__( self,", "upload. All required parameters must be populated in order to", "'[RouteCompilationError]'}, } def __init__( self, **kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors", "self.resource_group = kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to", "list of shared access policies. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar next_link:", "when sending a request. :ivar name: Operation name: {provider}/{resource}/{read |", "= { 'name_available': {'key': 'nameAvailable', 'type': 'bool'}, 'reason': {'key': 'reason',", "rules. A maximum of 10 custom endpoints are allowed across", "r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True}, 'batch_frequency_in_seconds': {'maximum': 720, 'minimum': 60}, 'max_chunk_size_in_bytes':", "a request. :param properties: The description of an X509 CA", "\"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite,", "Azure Storage endpoints where you can upload files. Currently you", "the routing rule is to be applied to, such as", "of partitions for receiving device-to-cloud messages in the Event Hub-compatible", "of Service Bus queue endpoints that IoT hub routes the", "'str'}, } def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRule, self).__init__(**kwargs) self.key_name", "container_name: Required. The name of the root container where you", "~datetime.timedelta :param connection_string: Required. The connection string for the Azure", "of disabled devices in the identity registry. :vartype disabled_device_count: long", "subscription_id: str :param resource_group: The name of the resource group", "queue. :type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If True, file", "{ 'name': {'readonly': True}, 'current_value': {'readonly': True}, 'max_value': {'readonly': True},", "= { 'properties': {'key': 'properties', 'type': '{str}'}, 'id': {'key': 'id',", "properties of the feedback queue for cloud-to-device messages. :type feedback:", "\"\"\"Range of route errors. :param start: Start where the route", "job. 
:vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: The time the job", "{'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs ):", "or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition that is evaluated to", "only one Azure Storage account and that MUST have its", "= kwargs.get('service_bus_queues', None) self.service_bus_topics = kwargs.get('service_bus_topics', None) self.event_hubs = kwargs.get('event_hubs',", "the hub. All required parameters must be populated in order", "connection_string: Required. The connection string of the service bus queue", "Result of testing route. Possible values include: \"undefined\", \"false\", \"true\".", "{'key': 'name', 'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'}, 'capacity':", "server, and will be ignored when sending a request. :ivar", "self).__init__(**kwargs) self.resource_type = None self.sku = kwargs['sku'] self.capacity = kwargs['capacity']", "\"\"\" _attribute_map = { 'value': {'key': 'value', 'type': '[CertificateDescription]'}, }", "__init__( self, **kwargs ): super(ImportDevicesRequest, self).__init__(**kwargs) self.input_blob_container_uri = kwargs['input_blob_container_uri'] self.output_blob_container_uri", "bus queue endpoint. :type subscription_id: str :param resource_group: The name", "True}, 'name': {'readonly': True}, 'etag': {'readonly': True}, 'type': {'readonly': True},", "'object'}, 'reported': {'key': 'reported', 'type': 'object'}, } def __init__( self,", "_attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key':", "class TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing route. 
All required parameters must", "self.job_id = None self.start_time_utc = None self.end_time_utc = None self.type", "**kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "**kwargs ): super(RouteErrorRange, self).__init__(**kwargs) self.start = kwargs.get('start', None) self.end =", "which a message is available to consume before it is", "True}, 'path': {'readonly': True}, 'endpoint': {'readonly': True}, } _attribute_map =", "'body', 'type': 'str'}, 'app_properties': {'key': 'appProperties', 'type': '{str}'}, 'system_properties': {'key':", "{'key': 'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'verification_code':", "None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count = kwargs.get('max_delivery_count', None) class", "self, **kwargs ): super(TestRouteResult, self).__init__(**kwargs) self.result = kwargs.get('result', None) self.details", "is_verified: bool :ivar created: The certificate's create date and time.", "**kwargs ): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 =", "self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None) self.encoding = kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model):", "date and time. 
:vartype created: ~datetime.datetime :ivar updated: The certificate's", "ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite,", "'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'state': {'key':", "'type': 'str'}, } def __init__( self, **kwargs ): super(EndpointHealthData, self).__init__(**kwargs)", "'end_time_utc': {'readonly': True}, 'type': {'readonly': True}, 'status': {'readonly': True}, 'failure_reason':", "compilation errors. :type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map = { 'compilation_errors':", "Specifying more than one storage account causes an error to", "'resourceGroup', 'type': 'str'}, } def __init__( self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties,", "The type of the resource. :vartype resource_type: str :param sku:", "True}, 'updated': {'readonly': True}, 'verification_code': {'readonly': True}, 'certificate': {'readonly': True},", "The list of Service Bus queue endpoints that IoT hub", "of Service Bus topic endpoints that the IoT hub routes", "route error happened. :type column: int \"\"\" _attribute_map = {", "= { 'value': {'readonly': True}, 'next_link': {'readonly': True}, } _attribute_map", "{ 'input_blob_container_uri': {'required': True}, 'output_blob_container_uri': {'required': True}, } _attribute_map =", "kwargs.get('value', None) self.next_link = None class IotHubLocationDescription(msrest.serialization.Model): \"\"\"Public representation of", "to-device-messages. 
:type max_delivery_count: int \"\"\" _validation = { 'max_delivery_count': {'maximum':", "self.etag = None self.type = None class CertificateListDescription(msrest.serialization.Model): \"\"\"The JSON-serialized", "{'readonly': True}, 'description': {'readonly': True}, } _attribute_map = { 'provider':", "enrichments: The list of user-provided enrichments that the IoT hub", "the condition are routed to. Currently only 1 endpoint is", "data. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link to more results.", "'partitionIds', 'type': '[str]'}, 'path': {'key': 'path', 'type': 'str'}, 'endpoint': {'key':", "will be ignored when sending a request. :param value: JSON-serialized", "units. :vartype default: long :ivar scale_type: The type of the", "the response body, it must also be provided as a", "include the built-in Event Hubs endpoint. :type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param", "= None class IotHubSkuInfo(msrest.serialization.Model): \"\"\"Information about the SKU of the", "None) self.storage_endpoints = kwargs.get('storage_endpoints', None) self.messaging_endpoints = kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications", "{'required': True}, } _attribute_map = { 'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type':", "to the shared access policy. Possible values include: \"RegistryRead\", \"RegistryWrite\",", "capabilities and features enabled for the IoT hub. Possible values", "'name': {'key': 'name', 'type': 'str'}, } def __init__( self, **kwargs", "an error to be thrown. 
Not specifying a value for", "'is_verified': {'readonly': True}, 'created': {'readonly': True}, 'updated': {'readonly': True}, }", "'operation', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def", "_attribute_map = { 'value': {'key': 'value', 'type': '[IotHubSkuDescription]'}, 'next_link': {'key':", "{'key': 'feedback', 'type': 'FeedbackProperties'}, } def __init__( self, **kwargs ):", "the IoT hub. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints:", "description: Description of the operation. :vartype description: str \"\"\" _validation", "\"\"\"The health data for an endpoint. :param endpoint_id: Id of", "See IoT Hub metrics to identify errors and monitor issues", "} def __init__( self, **kwargs ): super(OperationDisplay, self).__init__(**kwargs) self.provider =", "None) self.max_chunk_size_in_bytes = kwargs.get('max_chunk_size_in_bytes', None) self.encoding = kwargs.get('encoding', None) class", "True}, 'etag': {'readonly': True}, } _attribute_map = { 'properties': {'key':", "properties of the fallback route. IoT Hub uses these properties", "'partition_ids': {'key': 'partitionIds', 'type': '[str]'}, 'path': {'key': 'path', 'type': 'str'},", "available. :vartype name_available: bool :ivar reason: The reason for unavailability.", "objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubDescription] :ivar next_link: The next link. :vartype", "'details': {'key': 'details', 'type': 'TestRouteResultDetails'}, } def __init__( self, **kwargs", "Hub operations. 
It contains a list of operations and a", "'[EnrichmentProperties]'}, } def __init__( self, **kwargs ): super(RoutingProperties, self).__init__(**kwargs) self.endpoints", "True}, } _attribute_map = { 'value': {'key': 'value', 'type': '[EndpointHealthData]'},", "None) self.details = kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of", "of one of the locations where a resource is provisioned.", "'maxValue', 'type': 'long'}, } def __init__( self, **kwargs ): super(IotHubQuotaMetricInfo,", "hub will be failed over to. :type failover_region: str \"\"\"", "resource identifier. :vartype id: str :ivar name: The resource name.", "= { 'name': {'readonly': True}, 'current_value': {'readonly': True}, 'max_value': {'readonly':", "App properties. :type app_properties: dict[str, str] :param system_properties: System properties.", "'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers',", "to route messages to built-in and custom endpoints. A maximum", "array of JobResponse objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The", "= kwargs.get('body', None) self.app_properties = kwargs.get('app_properties', None) self.system_properties = kwargs.get('system_properties',", "'max_value': {'readonly': True}, } _attribute_map = { 'name': {'key': 'name',", "identifier. 
:vartype job_id: str :ivar start_time_utc: The start time of", "'endpoint': {'key': 'endpoint', 'type': 'str'}, } def __init__( self, **kwargs", "{'maximum': 100, 'minimum': 1}, } _attribute_map = { 'lock_duration_as_iso8601': {'key':", "list of Service Bus queue endpoints that IoT hub routes", "{ 'name': {'key': 'name', 'type': 'str'}, 'source': {'key': 'source', 'type':", ":vartype value: list[~azure.mgmt.iothub.v2019_11_04.models.Operation] :ivar next_link: URL to get the next", "IoT Hub has established an eventually consistent state of health.", ":type condition: str :param endpoint_names: Required. The list of endpoints", "'str'}, 'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type':", "str :ivar parent_job_id: The job identifier of the parent job,", "\"\"\"IoT Hub capacity information. Variables are only populated by the", "specify whether the fallback route is enabled. :type is_enabled: bool", "filter_name: Required. The name of the IP filter rule. :type", "over to. :type failover_region: str \"\"\" _validation = { 'failover_region':", "details: The error details. :vartype details: str \"\"\" _validation =", "that the IoT hub uses to route messages to built-in", ":type features: str or ~azure.mgmt.iothub.v2019_11_04.models.Capabilities :ivar locations: Primary and secondary", "secondary region is the Azure disaster recovery (DR) paired region", "input_blob_container_uri: Required. The input blob container URI. :type input_blob_container_uri: str", "in the 'routes' section are met. This is an optional", "ignored when sending a request. :ivar provider: Service provider: Microsoft", "the region where the IoT hub can failover to. 
Possible", "= kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model): \"\"\"The routing related properties of", "body, it must also be provided as a header per", "super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateWithNonceDescription(msrest.serialization.Model): \"\"\"The X509", "information. # Code generated by Microsoft (R) AutoRest Code Generator.", "values include: \"error\", \"warning\". :type severity: str or ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param", "hyphens, has a maximum length of 64 characters, and must", "'filter_name': {'key': 'filterName', 'type': 'str'}, 'action': {'key': 'action', 'type': 'str'},", "{'key': 'properties', 'type': 'RoutingTwinProperties'}, } def __init__( self, **kwargs ):", "of an enrichment that your IoT hub applies to messages", "The default time to live for cloud-to-device messages in the", "error message. :vartype message: str :ivar details: The error details.", "} _attribute_map = { 'value': {'key': 'value', 'type': '[EndpointHealthData]'}, 'next_link':", "Event Hubs endpoints that IoT hub routes messages to, based", "subscription identifier of the service bus topic endpoint. :type subscription_id:", "'type': 'str'}, } def __init__( self, **kwargs ): super(OperationListResult, self).__init__(**kwargs)", "'type': 'CloudToDeviceProperties'}, 'comments': {'key': 'comments', 'type': 'str'}, 'features': {'key': 'features',", "maximum length of 64 characters. The following names are reserved:", "period. See IoT Hub metrics to identify errors and monitor", "route messages to endpoints. 
All required parameters must be populated", "} def __init__( self, **kwargs ): super(RoutingMessage, self).__init__(**kwargs) self.body =", "'reported', 'type': 'object'}, } def __init__( self, **kwargs ): super(RoutingTwinProperties,", "True}, } _attribute_map = { 'properties': {'key': 'properties', 'type': '{str}'},", "The list of user-provided enrichments that the IoT hub applies", "None) self.routes = kwargs.get('routes', None) self.fallback_route = kwargs.get('fallback_route', None) self.enrichments", "also be provided as a header per the normal ETag", "of operation list results if there are any. :vartype next_link:", "super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription", "str \"\"\" _validation = { 'value': {'readonly': True}, 'next_link': {'readonly':", "requesting manual Failover for a hub. All required parameters must", "for each blob written to storage. Value should be between", "blob container URI. :type output_blob_container_uri: str \"\"\" _validation = {", "identifier of the service bus queue endpoint. :type subscription_id: str", "hub. All required parameters must be populated in order to", "Hub-compatible endpoint used by the IoT hub. Variables are only", "self.event_hub_endpoints = kwargs.get('event_hub_endpoints', None) self.routing = kwargs.get('routing', None) self.storage_endpoints =", "scale_type: The type of the scaling enabled. 
Possible values include:", "{'readonly': True}, 'updated': {'readonly': True}, } _attribute_map = { 'subject':", "'str'}, } def __init__( self, **kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value", "kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating whether a given", "'max_value': {'key': 'maxValue', 'type': 'long'}, } def __init__( self, **kwargs", "class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf certificate. :param certificate: base-64 representation", "on the routing rules. A maximum of 10 custom endpoints", "_attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key':", "state. :vartype provisioning_state: str :ivar state: The hub state. :vartype", "is evaluated in order to apply the fallback route. If", "'str'}, 'expiry': {'key': 'expiry', 'type': 'rfc-1123'}, 'thumbprint': {'key': 'thumbprint', 'type':", "str :ivar status_message: The status message for the job. :vartype", "self.http_status_code = None self.message = None self.details = None class", "Determines whether certificate has been verified. :vartype is_verified: bool :ivar", "'twin', 'type': 'RoutingTwin'}, } def __init__( self, **kwargs ): super(TestAllRoutesInput,", "= { 'minimum': {'readonly': True, 'maximum': 1, 'minimum': 1}, 'maximum':", "'str'}, } def __init__( self, **kwargs ): super(FailoverInput, self).__init__(**kwargs) self.failover_region", "self, **kwargs ): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601", "= kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties indicating whether a", "when sending a request. 
:param authorization_policies: The shared access policies", "= None self.display = kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model): \"\"\"The object", "self.message = kwargs.get('message', None) self.twin = kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model):", "Hub type. :param value: IotHub type. :type value: str :param", "'RouteProperties'}, 'twin': {'key': 'twin', 'type': 'RoutingTwin'}, } def __init__( self,", ":param endpoints: The properties related to the custom endpoints to", "container need not exist but should be creatable using the", ":vartype parent_job_id: str \"\"\" _validation = { 'job_id': {'readonly': True},", "The subscription identifier of the storage account. :type subscription_id: str", "'operation': {'key': 'operation', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'},", "'name_available': {'readonly': True}, 'reason': {'readonly': True}, } _attribute_map = {", "'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'state': {'key': 'state', 'type': 'str'},", "{'readonly': True}, 'status': {'readonly': True}, 'failure_reason': {'readonly': True}, 'status_message': {'readonly':", ":param retention_time_in_days: The retention time for device-to-cloud messages in days.", "None) self.container_name = kwargs['container_name'] self.file_name_format = kwargs.get('file_name_format', None) self.batch_frequency_in_seconds =", "{'readonly': True}, } _attribute_map = { 'subject': {'key': 'subject', 'type':", "{'key': 'retentionTimeInDays', 'type': 'long'}, 'partition_count': {'key': 'partitionCount', 'type': 'int'}, 'partition_ids':", "ignored when sending a request. 
:param properties: The description of", "messaging_endpoints: The messaging endpoint properties for the file upload notification", "self, **kwargs ): super(RouteErrorPosition, self).__init__(**kwargs) self.line = kwargs.get('line', None) self.column", "__init__( self, **kwargs ): super(OperationInputs, self).__init__(**kwargs) self.name = kwargs['name'] class", "kwargs.get('fallback_route', None) self.enrichments = kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The properties", "Required. The connection string of the service bus queue endpoint.", "is {iothub}/{partition}/{YYYY}/{MM}/{DD}/{HH}/{mm}. All parameters are mandatory but can be reordered.", "'provider': {'readonly': True}, 'resource': {'readonly': True}, 'operation': {'readonly': True}, 'description':", "in CIDR notation for the rule. :type ip_mask: str \"\"\"", "str or ~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthStatus \"\"\" _attribute_map = { 'endpoint_id': {'key': 'endpointId',", "'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, }", "{'key': 'endpointNames', 'type': '[str]'}, 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, }", "def __init__( self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string']", "of the region, can be either primary or secondary. The", "'TestRouteResultDetails'}, } def __init__( self, **kwargs ): super(TestRouteResult, self).__init__(**kwargs) self.result", ":param is_enabled: Required. Used to specify whether a route is", "~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly':", "used by the IoT hub. Variables are only populated by", "of the route error. Possible values include: \"error\", \"warning\". :type", "to Azure. :param input_blob_container_uri: Required. 
The input blob container URI.", "\"\"\" _attribute_map = { 'endpoints': {'key': 'endpoints', 'type': 'RoutingEndpoints'}, 'routes':", "{'key': 'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys': {'key': 'excludeKeys', 'type': 'bool'}, }", "to Azure. :param key: Required. The key or name for", "URI. :type input_blob_container_uri: str :param output_blob_container_uri: Required. The output blob", "the IoT hub. All required parameters must be populated in", "= kwargs.get('value', None) self.next_link = None class EventHubProperties(msrest.serialization.Model): \"\"\"The properties", "__init__( self, **kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None)", "self.partition_ids = None self.path = None self.endpoint = None class", "to which files are uploaded. :type connection_string: str :param container_name:", "endpoints used by this IoT hub. :param lock_duration_as_iso8601: The lock", "locations where a resource is provisioned. :param location: The name", "certificate: base-64 representation of the X509 leaf certificate .cer file", "= { 'routes': {'key': 'routes', 'type': '[MatchedRoute]'}, } def __init__(", "'properties', 'type': 'RoutingTwinProperties'}, } def __init__( self, **kwargs ): super(RoutingTwin,", "max_value: long \"\"\" _validation = { 'name': {'readonly': True}, 'current_value':", "'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key':", "The name of the IoT hub to check. :type name:", "def __init__( self, **kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source',", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string: Required. The", "of Endpoint health data. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link", "\"\"\" _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'localized_value':", "Id of the endpoint. :type endpoint_id: str :param health_status: Health", "sending a request. :ivar job_id: The job identifier. :vartype job_id:", "\"B3\". :type name: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubSku :ivar tier: The billing", "'str'}, 'tier': {'key': 'tier', 'type': 'str'}, 'capacity': {'key': 'capacity', 'type':", "'type': 'str'}, 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup',", "be populated in order to send to Azure. :param key_name:", "{'key': 'tags', 'type': '{str}'}, } def __init__( self, **kwargs ):", "properties related to service bus queue endpoint types. All required", "attempts to deliver a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int", "None self.certificate = None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf certificate.", "allowed across all endpoint types for free hubs. :type endpoints:", "your IoT hub uses to route messages to endpoints. All", ":type type: str :param unit: Unit of IotHub type. :type", "and custom endpoints. A maximum of 100 routing rules are", "Azure. :param export_blob_container_uri: Required. The export blob container URI. :type", "disaster recovery (DR) paired region and also the region where", "self.fallback_route = kwargs.get('fallback_route', None) self.enrichments = kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model):", "messages to blobs. 
Supported values are 'avro', 'avrodeflate', and 'JSON'.", "__init__( self, **kwargs ): super(FallbackRouteProperties, self).__init__(**kwargs) self.name = kwargs.get('name', None)", "no condition is provided, it evaluates to true by default.", "topic endpoint. :type resource_group: str \"\"\" _validation = { 'connection_string':", "\"\"\"Routing message. :param body: Body of routing message. :type body:", "enrichment property. :type key: str :param value: Required. The value", "str :param output_blob_container_uri: Required. The output blob container URI. :type", "} def __init__( self, **kwargs ): super(IotHubDescription, self).__init__(**kwargs) self.etag =", "\"\"\"Identity registry statistics. Variables are only populated by the server,", "self.key = kwargs['key'] self.value = kwargs['value'] self.endpoint_names = kwargs['endpoint_names'] class", "filter_name: str :param action: Required. The desired action for requests", "super(RoutingMessage, self).__init__(**kwargs) self.body = kwargs.get('body', None) self.app_properties = kwargs.get('app_properties', None)", "**kwargs ): super(RouteCompilationError, self).__init__(**kwargs) self.message = kwargs.get('message', None) self.severity =", "endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The list of user-provided routing rules", "} _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'resource':", "super(EndpointHealthDataListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "~datetime.datetime :ivar end_time_utc: The time the job stopped processing. :vartype", "'connection_string': {'required': True}, 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required':", "List of IoT Hub operations supported by the Microsoft.Devices resource", "be populated in order to send to Azure. 
:param message:", "= { 'next_link': {'readonly': True}, } _attribute_map = { 'value':", "'description': {'key': 'description', 'type': 'str'}, } def __init__( self, **kwargs", "body: str :param app_properties: App properties. :type app_properties: dict[str, str]", "when requesting an import of all devices in the hub.", "include: \"Invalid\", \"AlreadyExists\". :vartype reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param message:", "X509 Certificate. Variables are only populated by the server, and", "True}, 'type': {'readonly': True}, 'status': {'readonly': True}, 'failure_reason': {'readonly': True},", "def __init__( self, **kwargs ): super(IotHubCapacity, self).__init__(**kwargs) self.minimum = None", "hub routes messages based on the routing rules. A maximum", "= None self.location = kwargs['location'] self.tags = kwargs.get('tags', None) class", "send to Azure. :param key_name: Required. The name of the", "self.system_properties = kwargs.get('system_properties', None) class RoutingProperties(msrest.serialization.Model): \"\"\"The routing related properties", "Azure. :param key_name: Required. The name of the shared access", "super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'},", "= { 'name': {'key': 'name', 'type': 'str'}, } def __init__(", "'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'failure_reason': {'key': 'failureReason',", "name need not be the same as the actual queue", "= kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota response. :param", "kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of the", "leaf certificate. 
:param certificate: base-64 representation of X509 certificate .cer", "of routes that matched. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties \"\"\" _attribute_map =", "super(TestRouteResult, self).__init__(**kwargs) self.result = kwargs.get('result', None) self.details = kwargs.get('details', None)", "must be populated in order to send to Azure. :ivar", "to send to Azure. :param key: Required. The key or", "same as the actual topic name. :type name: str :param", "endpoint_names: Required. The list of endpoints to which the messages", "established a connection with the endpoint. No messages have been", "Hub retried sending messages for the retrial period. See IoT", "the file upload notification queue. :type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param", "): super(RoutingTwin, self).__init__(**kwargs) self.tags = kwargs.get('tags', None) self.properties = kwargs.get('properties',", "message: str :param severity: Severity of the route error. Possible", "True}, } _attribute_map = { 'message': {'key': 'message', 'type': 'RoutingMessage'},", "self.failover_region = kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model): \"\"\"The properties of the fallback", "Hub-compatible endpoint properties. The only possible keys to this dictionary", "causes an error to be thrown. :type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties]", "type of the resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity: Required.", "'type': '[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__(", "any of the conditions specified in the 'routes' section get", "'key': {'required': True}, 'value': {'required': True}, 'endpoint_names': {'required': True, 'min_items':", "routing related properties of the IoT hub. 
See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type", "} def __init__( self, **kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value =", "related to a storage container endpoint. All required parameters must", "hub cloud-to-device messaging properties. :param max_delivery_count: The max delivery count", ":param properties: The description of an X509 CA Certificate including", "None) class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related to an event hub", "} def __init__( self, **kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value =", "set of tags. The resource tags. :type tags: dict[str, str]", "of quota metrics objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo] :ivar next_link: The", "int :ivar partition_ids: The partition ids in the Event Hub-compatible", "Azure. :param connection_string: Required. The connection string of the storage", "= { 'name': {'required': True}, 'tier': {'readonly': True}, } _attribute_map", "kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the route error happened.", "DeviceConnect\". :type rights: str or ~azure.mgmt.iothub.v2019_11_04.models.AccessRights \"\"\" _validation = {", "def __init__( self, **kwargs ): super(FeedbackProperties, self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601',", "TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing route. 
All required parameters must be", "'str'}, } def __init__( self, **kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string", "reported: object \"\"\" _attribute_map = { 'desired': {'key': 'desired', 'type':", "self, **kwargs ): super(IotHubNameAvailabilityInfo, self).__init__(**kwargs) self.name_available = None self.reason =", "_validation = { 'filter_name': {'required': True}, 'action': {'required': True}, 'ip_mask':", "self.messaging_endpoints = kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device =", "def __init__( self, **kwargs ): super(CertificateDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties',", "include: \"Accept\", \"Reject\". :type action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask:", "generated by Microsoft (R) AutoRest Code Generator. # Changes may", ":type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The next link. :vartype next_link:", "def __init__( self, **kwargs ): super(IpFilterRule, self).__init__(**kwargs) self.filter_name = kwargs['filter_name']", "kwargs.get('endpoints', None) self.routes = kwargs.get('routes', None) self.fallback_route = kwargs.get('fallback_route', None)", "{'key': 'routes', 'type': '[MatchedRoute]'}, } def __init__( self, **kwargs ):", "storage account causes an error to be thrown. 
Not specifying", "to consume before it is expired by the IoT hub.", "'type': '[EnrichmentProperties]'}, } def __init__( self, **kwargs ): super(RoutingProperties, self).__init__(**kwargs)", "str] \"\"\" _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'},", "True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, } _attribute_map =", "'endpoint': {'readonly': True}, } _attribute_map = { 'retention_time_in_days': {'key': 'retentionTimeInDays',", "End where the route error happened. :type end: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition \"\"\"", "of the quota metric. :vartype max_value: long \"\"\" _validation =", "'etag': {'readonly': True}, 'type': {'readonly': True}, } _attribute_map = {", "None) class CertificatePropertiesWithNonce(msrest.serialization.Model): \"\"\"The description of an X509 CA Certificate", ":type properties: dict[str, str] :ivar id: The Event Hub-compatible consumer", "'certificate', 'type': 'str'}, } def __init__( self, **kwargs ): super(CertificatePropertiesWithNonce,", "a maximum of 5 routing rules are allowed for free", "super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "TestRouteResult(msrest.serialization.Model): \"\"\"Result of testing one route. :param result: Result of", "Twin Reference. :type twin: ~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation = { 'route':", "'str'}, 'action': {'key': 'action', 'type': 'str'}, 'ip_mask': {'key': 'ipMask', 'type':", "it routes messages to the fallback endpoint. 
All required parameters", "super(JobResponseListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class", "hubs and a maximum of 5 routing rules are allowed", "} def __init__( self, **kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties =", "{'key': 'value', 'type': '[JobResponse]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'},", "64 characters. The following names are reserved: events, fileNotifications, $default.", "kwargs.get('location', None) self.role = kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The properties", "**kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name']", "1}, } _attribute_map = { 'key': {'key': 'key', 'type': 'str'},", "when sending a request. :ivar provider: Service provider: Microsoft Devices.", "self.name = kwargs['name'] self.source = kwargs['source'] self.condition = kwargs.get('condition', None)", "the 'routes' section get routed to the built-in eventhub endpoint.", "__init__( self, **kwargs ): super(OperationListResult, self).__init__(**kwargs) self.value = None self.next_link", "self, **kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id", "X509 CA Certificate. :type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificateProperties :ivar id: The resource", "'resource_type': {'key': 'resourceType', 'type': 'str'}, 'sku': {'key': 'sku', 'type': 'IotHubSkuInfo'},", "{'required': True}, } _attribute_map = { 'name': {'key': 'name', 'type':", "self, **kwargs ): super(RoutingServiceBusQueueEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name =", "status of the job. 
Possible values include: \"unknown\", \"enqueued\", \"running\",", "The maximum number of units. :vartype maximum: long :ivar default:", "None self.status = None self.failure_reason = None self.status_message = None", "{'key': 'disabledDeviceCount', 'type': 'long'}, } def __init__( self, **kwargs ):", "account. :type container_name: str :param file_name_format: File name format for", "of storage container endpoints that IoT hub routes messages to,", "endpoint is accepting messages as expected. The 'unhealthy' status shows", "} def __init__( self, **kwargs ): super(SharedAccessSignatureAuthorizationRuleListResult, self).__init__(**kwargs) self.value =", "self).__init__(**kwargs) self.key_name = kwargs['key_name'] self.primary_key = kwargs.get('primary_key', None) self.secondary_key =", "\"writeDeviceProperties\", \"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". :vartype type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType", "None) class RoutingStorageContainerProperties(msrest.serialization.Model): \"\"\"The properties related to a storage container", "error to be thrown. :type storage_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.StorageEndpointProperties] :param messaging_endpoints:", "str :param unit: Unit of IotHub type. :type unit: str", "} def __init__( self, **kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate =", "__init__( self, **kwargs ): super(Resource, self).__init__(**kwargs) self.id = None self.name", "in order to apply the fallback route. If the condition", "topic endpoint. :type subscription_id: str :param resource_group: The name of", "{ 'body': {'key': 'body', 'type': 'str'}, 'app_properties': {'key': 'appProperties', 'type':", "name_available: bool :ivar reason: The reason for unavailability. Possible values", "hub/iot-hub-devguide-file-upload. 
:type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period of time", ":param location: The name of the Azure region. :type location:", "job. Possible values include: \"unknown\", \"enqueued\", \"running\", \"completed\", \"failed\", \"cancelled\".", "'start_time_utc': {'readonly': True}, 'end_time_utc': {'readonly': True}, 'type': {'readonly': True}, 'status':", "= { 'name': {'required': True}, } _attribute_map = { 'name':", ":param type: Response type. :type type: str :param unit: Unit", "get the next set of operation list results if there", "messages which do not meet any of the conditions specified", "= { 'job_id': {'key': 'jobId', 'type': 'str'}, 'start_time_utc': {'key': 'startTimeUtc',", ":param message: Route error message. :type message: str :param severity:", "but should be creatable using the connectionString specified. :type container_name:", "str :param primary_key: The primary key. :type primary_key: str :param", "an X509 CA Certificate. Variables are only populated by the", "'object'}, } def __init__( self, **kwargs ): super(RoutingTwinProperties, self).__init__(**kwargs) self.desired", "next_link: Link to more results. :vartype next_link: str \"\"\" _validation", "def __init__( self, **kwargs ): super(OperationListResult, self).__init__(**kwargs) self.value = None", "string of the service bus queue endpoint. :type connection_string: str", "job, if any. :vartype parent_job_id: str \"\"\" _validation = {", "super(RouteProperties, self).__init__(**kwargs) self.name = kwargs['name'] self.source = kwargs['source'] self.condition =", "a URL link to get the next set of results.", "True}, 'next_link': {'readonly': True}, } _attribute_map = { 'value': {'key':", "super(TestAllRoutesResult, self).__init__(**kwargs) self.routes = kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input for", ":type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage :param route: Required. Route properties. 
:type route:", "name: str :ivar current_value: The current value for the quota", "def __init__( self, **kwargs ): super(RoutingTwin, self).__init__(**kwargs) self.tags = kwargs.get('tags',", "string containing the reason for the failure. :vartype failure_reason: str", "): super(Operation, self).__init__(**kwargs) self.name = None self.display = kwargs.get('display', None)", "'exclude_keys': {'required': True}, } _attribute_map = { 'export_blob_container_uri': {'key': 'exportBlobContainerUri',", "\"\"\"The properties of the Job Response object. Variables are only", "be ignored when sending a request. :param authorization_policies: The shared", "self.endpoint = None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when", "{'key': 'severity', 'type': 'str'}, 'location': {'key': 'location', 'type': 'RouteErrorRange'}, }", "'str'}, 'endpoint': {'key': 'endpoint', 'type': 'str'}, } def __init__( self,", "class RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related to an event hub endpoint.", "IoT hub applies to messages to be delivered to built-in", "message: str :ivar details: The error details. :vartype details: str", "'location': {'key': 'location', 'type': 'RouteErrorRange'}, } def __init__( self, **kwargs", "True}, 'location': {'required': True}, } _attribute_map = { 'id': {'key':", "= { 'start': {'key': 'start', 'type': 'RouteErrorPosition'}, 'end': {'key': 'end',", "'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type',", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. 
:type default_ttl_as_iso8601: ~datetime.timedelta :param feedback: The properties of", "self.details = None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties of the EventHubConsumerGroupInfo", "'value': {'key': 'value', 'type': '[IotHubSkuDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'},", "properties of the EventHubConsumerGroupInfo object. Variables are only populated by", "be ignored when sending a request. :param retention_time_in_days: The retention", "code. :vartype http_status_code: str :ivar message: The error message. :vartype", "object that represents the operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation", "statuses have following meanings. The 'healthy' status shows that the", "type. :type type: str :param unit: Unit of IotHub type.", "= { 'service_bus_queues': {'key': 'serviceBusQueues', 'type': '[RoutingServiceBusQueueEndpointProperties]'}, 'service_bus_topics': {'key': 'serviceBusTopics',", "'verificationCode', 'type': 'str'}, 'certificate': {'key': 'certificate', 'type': 'str'}, } def", "of devices in the identity registry. :vartype total_device_count: long :ivar", "{'key': 'value', 'type': '[CertificateDescription]'}, } def __init__( self, **kwargs ):", "{ 'export_blob_container_uri': {'key': 'exportBlobContainerUri', 'type': 'str'}, 'exclude_keys': {'key': 'excludeKeys', 'type':", "def __init__( self, **kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors',", "Certificate objects. :param value: The array of Certificate objects. :type", "'str'}, 'display': {'key': 'display', 'type': 'OperationDisplay'}, } def __init__( self,", "container_name: str :param file_name_format: File name format for the blob.", "'location', 'type': 'RouteErrorRange'}, } def __init__( self, **kwargs ): super(RouteCompilationError,", "The array of IotHubSkuDescription. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The", "'type': 'str'}, 'state': {'key': 'state', 'type': 'str'}, 'host_name': {'key': 'hostName',", "check. :type name: str \"\"\" _validation = { 'name': {'required':", "} _attribute_map = { 'properties': {'key': 'properties', 'type': '{str}'}, 'id':", "name: str :param subscription_id: The subscription identifier of the event", "connection_string: Required. The connection string for the Azure Storage account", "_attribute_map = { 'endpoint_id': {'key': 'endpointId', 'type': 'str'}, 'health_status': {'key':", ":param localized_value: Localized value of name. :type localized_value: str \"\"\"", "__init__( self, **kwargs ): super(TestAllRoutesInput, self).__init__(**kwargs) self.routing_source = kwargs.get('routing_source', None)", "azure.core.exceptions import HttpResponseError import msrest.serialization class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509", "Required. Route properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing Twin", "): super(CloudToDeviceProperties, self).__init__(**kwargs) self.max_delivery_count = kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601',", "'long'}, 'default': {'key': 'default', 'type': 'long'}, 'scale_type': {'key': 'scaleType', 'type':", "underscores, hyphens, has a maximum length of 64 characters, and", "will be ignored when sending a request. :ivar minimum: The", "name of the root container where you upload files. 
The", ":vartype status: str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If status ==", "{'key': 'value', 'type': '[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, }", "'localizedValue', 'type': 'str'}, } def __init__( self, **kwargs ): super(Name,", "_validation = { 'job_id': {'readonly': True}, 'start_time_utc': {'readonly': True}, 'end_time_utc':", "'input_blob_container_uri': {'key': 'inputBlobContainerUri', 'type': 'str'}, 'output_blob_container_uri': {'key': 'outputBlobContainerUri', 'type': 'str'},", "None self.endpoint = None class ExportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters", "storage. Value should be between 60 and 720 seconds. Default", "is enabled. :type is_enabled: bool \"\"\" _validation = { 'name':", "if there are any. :vartype next_link: str \"\"\" _validation =", "None) self.twin = kwargs.get('twin', None) class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing", "~azure.mgmt.iothub.v2019_11_04.models.IotHubProperties :param sku: Required. IotHub SKU info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo", "kwargs['endpoint_names'] self.is_enabled = kwargs['is_enabled'] class RoutingEndpoints(msrest.serialization.Model): \"\"\"The properties related to", "start: Start where the route error happened. :type start: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorPosition", "in order to send to Azure. :param export_blob_container_uri: Required. The", "\"updateDeviceConfiguration\", \"rebootDevice\", \"factoryResetDevice\", \"firmwareUpdate\". :vartype type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar", "JobResponse objects. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar next_link: The next link.", "self.minimum = None self.maximum = None self.default = None self.scale_type", "'state': {'key': 'state', 'type': 'str'}, 'host_name': {'key': 'hostName', 'type': 'str'},", "} def __init__( self, **kwargs ): super(Name, self).__init__(**kwargs) self.value =", "): super(Resource, self).__init__(**kwargs) self.id = None self.name = None self.type", "required. If it is provided in the response body, it", "self, **kwargs ): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count = None self.enabled_device_count =", "has a maximum length of 64 characters, and must be", "JSON-serialized array of Endpoint health data. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar", "you can upload files. Currently you can configure only one", "need not exist but should be creatable using the connectionString", "group name. :vartype name: str :ivar type: the resource type.", "in order to send to Azure. :param sas_ttl_as_iso8601: The period", "of routing message. :type body: str :param app_properties: App properties.", "long \"\"\" _validation = { 'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly':", "'[UserSubscriptionQuota]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "None) class EndpointHealthData(msrest.serialization.Model): \"\"\"The health data for an endpoint. :param", "\"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect, DeviceConnect\".", "} def __init__( self, **kwargs ): super(CertificateProperties, self).__init__(**kwargs) self.subject =", "'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items': 1, 'min_items': 1},", "number of units. 
:vartype maximum: long :ivar default: The default", "100, 'minimum': 1}, } _attribute_map = { 'max_delivery_count': {'key': 'maxDeliveryCount',", "\"DeviceMessages\", \"TwinChangeEvents\", \"DeviceLifecycleEvents\", \"DeviceJobLifecycleEvents\". :type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param", "can be either primary or secondary. The primary region is", "= None self.updated = None self.verification_code = None self.certificate =", "of possession. :vartype verification_code: str :ivar certificate: The certificate content.", "following names are reserved: events, fileNotifications, $default. Endpoint names must", "need not be the same as the actual topic name.", "in the storage account. :type container_name: str :param file_name_format: File", "'type': 'RouteErrorPosition'}, 'end': {'key': 'end', 'type': 'RouteErrorPosition'}, } def __init__(", "720, 'minimum': 60}, 'max_chunk_size_in_bytes': {'maximum': 524288000, 'minimum': 10485760}, } _attribute_map", "= { 'properties': {'key': 'properties', 'type': 'CertificatePropertiesWithNonce'}, 'id': {'key': 'id',", "'failure_reason': {'key': 'failureReason', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'},", "1}, 'is_enabled': {'required': True}, } _attribute_map = { 'name': {'key':", "https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file- upload#file-upload-notification-configuration-options. :type sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string: Required. The connection", "class CertificateBodyDescription(msrest.serialization.Model): \"\"\"The JSON-serialized X509 Certificate. :param certificate: base-64 representation", "name: str :param source: Required. The source that the routing", "= { 'properties': {'key': 'properties', 'type': 'CertificateProperties'}, 'id': {'key': 'id',", "properties of an enrichment that your IoT hub applies to", "RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input parameter. 
This is an optional parameter.", "self, **kwargs ): super(RoutingEventHubProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name =", "None) class SharedAccessSignatureAuthorizationRule(msrest.serialization.Model): \"\"\"The properties of an IoT hub shared", "this property when the enableFileUploadNotifications property is set to True,", "None) self.resource_group = kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related", "operation: str :ivar description: Description of the operation. :vartype description:", "be populated in order to send to Azure. :ivar id:", ":type enrichments: list[~azure.mgmt.iothub.v2019_11_04.models.EnrichmentProperties] \"\"\" _attribute_map = { 'endpoints': {'key': 'endpoints',", "'authorization_policies': {'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'},", "uploaded. :type connection_string: str :param container_name: Required. The name of", "= kwargs.get('fallback_route', None) self.enrichments = kwargs.get('enrichments', None) class RoutingServiceBusQueueEndpointProperties(msrest.serialization.Model): \"\"\"The", "\"\"\"Error details. Variables are only populated by the server, and", "DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite, ServiceConnect\",", "'type': 'RouteProperties'}, } def __init__( self, **kwargs ): super(MatchedRoute, self).__init__(**kwargs)", "str or ~azure.mgmt.iothub.v2019_11_04.models.JobStatus :ivar failure_reason: If status == failed, this", "self.end = kwargs.get('end', None) class RouteProperties(msrest.serialization.Model): \"\"\"The properties of a", "class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota response. 
:param id: IotHub type", "'type': 'str'}, 'message': {'key': 'message', 'type': 'RoutingMessage'}, 'twin': {'key': 'twin',", "None) class RoutingTwinProperties(msrest.serialization.Model): \"\"\"RoutingTwinProperties. :param desired: Twin desired properties. :type", "ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite, ServiceConnect, DeviceConnect\",", "class IotHubSkuDescription(msrest.serialization.Model): \"\"\"SKU properties. Variables are only populated by the", ":param fallback_route: The properties of the route that is used", "expected. The 'unhealthy' status shows that the endpoint is not", "etag: The Etag field is *not* required. If it is", "to list IoT Hub operations. It contains a list of", "self.storage_endpoints = kwargs.get('storage_endpoints', None) self.messaging_endpoints = kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications =", "str \"\"\" _validation = { 'job_id': {'readonly': True}, 'start_time_utc': {'readonly':", "upload notification queue. :type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications: If", "Numerical limit on IotHub type. :type limit: int :param name:", "as the actual topic name. :type name: str :param subscription_id:", "action: Required. The desired action for requests captured by this", "Service provider: Microsoft Devices. :vartype provider: str :ivar resource: Resource", "value: The array of Certificate objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.CertificateDescription] \"\"\"", "endpoints. 
All required parameters must be populated in order to", "of a routing rule that your IoT hub uses to", "'details', 'type': 'TestRouteResultDetails'}, } def __init__( self, **kwargs ): super(TestRouteResult,", "_validation = { 'connection_string': {'required': True}, 'container_name': {'required': True}, }", "'outputBlobContainerUri', 'type': 'str'}, } def __init__( self, **kwargs ): super(ImportDevicesRequest,", "'key_name': {'key': 'keyName', 'type': 'str'}, 'primary_key': {'key': 'primaryKey', 'type': 'str'},", "The role of the region, can be either primary or", ":type routes: list[~azure.mgmt.iothub.v2019_11_04.models.RouteProperties] :param fallback_route: The properties of the route", "hub applies to messages delivered to endpoints. All required parameters", "batch_frequency_in_seconds: Time interval at which blobs are written to storage.", ":type message: str \"\"\" _validation = { 'name_available': {'readonly': True},", "'fallbackRoute', 'type': 'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments', 'type': '[EnrichmentProperties]'}, } def", "errors. :param start: Start where the route error happened. :type", ":ivar default: The default number of units. :vartype default: long", "can failover to. Possible values include: \"primary\", \"secondary\". :type role:", ":ivar name: The Event Hub-compatible consumer group name. :vartype name:", "kwargs.get('name', None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of User subscription quota", "for a hub. All required parameters must be populated in", "'messagingEndpoints', 'type': '{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device': {'key':", "long :ivar max_value: The maximum value of the quota metric.", "str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing message. 
:type message: ~azure.mgmt.iothub.v2019_11_04.models.RoutingMessage", "def __init__( self, **kwargs ): super(CertificateBodyDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate',", "\"DeviceJobLifecycleEvents\". :type source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition", "messages to be delivered to built-in and custom endpoints. See:", "If no condition is provided, it evaluates to true by", "storage container in the storage account. :type container_name: str :param", "True}, } _attribute_map = { 'minimum': {'key': 'minimum', 'type': 'long'},", "is 'avro'. Possible values include: \"Avro\", \"AvroDeflate\", \"JSON\". :type encoding:", "expiry: ~datetime.datetime :ivar thumbprint: The certificate's thumbprint. :vartype thumbprint: str", "self.default = None self.scale_type = None class Resource(msrest.serialization.Model): \"\"\"The common", "__init__( self, **kwargs ): super(RoutingTwin, self).__init__(**kwargs) self.tags = kwargs.get('tags', None)", "The error message. :vartype message: str :ivar details: The error", "create or update calls for the IoT hub. 
:type event_hub_endpoints:", "= kwargs.get('severity', None) self.location = kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position", "{'readonly': True}, 'failure_reason': {'readonly': True}, 'status_message': {'readonly': True}, 'parent_job_id': {'readonly':", "'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type': 'int'}, 'encoding': {'key':", "__init__( self, **kwargs ): super(RoutingServiceBusTopicEndpointProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name", "str \"\"\" _validation = { 'subject': {'readonly': True}, 'expiry': {'readonly':", "is_enabled: bool \"\"\" _validation = { 'name': {'required': True, 'pattern':", "TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing all routes. :param routes: JSON-serialized array", "properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing Twin Reference. :type", "Resource(msrest.serialization.Model): \"\"\"The common properties of an Azure resource. Variables are", "self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys'] class FailoverInput(msrest.serialization.Model): \"\"\"Use to", "The type of the resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param capacity:", "= { 'name': {'key': 'name', 'type': 'str'}, 'display': {'key': 'display',", "route. The name can only include alphanumeric characters, periods, underscores,", "'RouteProperties'}, } def __init__( self, **kwargs ): super(MatchedRoute, self).__init__(**kwargs) self.properties", "route error happened. :param line: Line where the route error", "= None class MatchedRoute(msrest.serialization.Model): \"\"\"Routes that matched. 
:param properties: Properties", "max_chunk_size_in_bytes: Maximum number of bytes for each blob written to", "= kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate. Variables are", "value: The array of JobResponse objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.JobResponse] :ivar", "class CloudToDeviceProperties(msrest.serialization.Model): \"\"\"The IoT hub cloud-to-device messaging properties. :param max_delivery_count:", "the route error. Possible values include: \"error\", \"warning\". :type severity:", "self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None) self.feedback = kwargs.get('feedback', None) class EndpointHealthData(msrest.serialization.Model):", "Required. The type of the resource. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo :param", "the provisioned Event Hub-compatible endpoint used by the IoT hub.", "= kwargs.get('location', None) self.role = kwargs.get('role', None) class IotHubNameAvailabilityInfo(msrest.serialization.Model): \"\"\"The", "'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'},", "= None self.disabled_device_count = None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when", "Microsoft Devices. :vartype provider: str :ivar resource: Resource Type: IotHubs.", "message. :type message: str :param severity: Severity of the route", ":param route: Required. Route properties. :type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin:", "'nextLink', 'type': 'str'}, } def __init__( self, **kwargs ): super(UserSubscriptionQuotaListResult,", "JSON-serialized array of JobResponse objects with a next link. Variables", "order to send to Azure. :param name: Required. 
The name", "= kwargs.get('health_status', None) class EndpointHealthDataListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of EndpointHealthData", "device-messages. :type max_delivery_count: int :param default_ttl_as_iso8601: The default time to", "Endpoint health data. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EndpointHealthData] :ivar next_link: Link to", "OperationInputs(msrest.serialization.Model): \"\"\"Input values. All required parameters must be populated in", "= { 'subject': {'readonly': True}, 'expiry': {'readonly': True}, 'thumbprint': {'readonly':", ":type properties: ~azure.mgmt.iothub.v2019_11_04.models.CertificatePropertiesWithNonce :ivar id: The resource identifier. :vartype id:", "None self.reason = None self.message = kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model):", "for the IoT hub. All required parameters must be populated", "URL link to get the next set of results. Variables", ":type ip_mask: str \"\"\" _validation = { 'filter_name': {'required': True},", "= kwargs.get('message', None) class IotHubProperties(msrest.serialization.Model): \"\"\"The properties of an IoT", "only include alphanumeric characters, periods, underscores, hyphens and has a", "= kwargs.get('service_bus_topics', None) self.event_hubs = kwargs.get('event_hubs', None) self.storage_containers = kwargs.get('storage_containers',", "'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'}, } def __init__(", "which the SAS URI generated by IoT Hub for file", "values include: \"RegistryRead\", \"RegistryWrite\", \"ServiceConnect\", \"DeviceConnect\", \"RegistryRead, RegistryWrite\", \"RegistryRead, ServiceConnect\",", "objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.EventHubConsumerGroupInfo] :ivar next_link: The next link. :vartype", "app_properties: App properties. 
:type app_properties: dict[str, str] :param system_properties: System", "**kwargs ): super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name']", "message: The error message. :vartype message: str :ivar details: The", "secondary_key: str :param rights: Required. The permissions assigned to the", "self, **kwargs ): super(RoutingEndpoints, self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues', None) self.service_bus_topics", "SKU. Possible values include: \"F1\", \"S1\", \"S2\", \"S3\", \"B1\", \"B2\",", "the failure. :vartype failure_reason: str :ivar status_message: The status message", "exist but should be creatable using the connectionString specified. :type", "whether a given IoT hub name is available. Variables are", "'str'}, 'parent_job_id': {'key': 'parentJobId', 'type': 'str'}, } def __init__( self,", "is available. Variables are only populated by the server, and", "def __init__( self, **kwargs ): super(CertificateProperties, self).__init__(**kwargs) self.subject = None", "Possible values include: \"primary\", \"secondary\". :type role: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubReplicaRoleType", "~azure.mgmt.iothub.v2019_11_04.models.RoutingStorageContainerPropertiesEncoding \"\"\" _validation = { 'connection_string': {'required': True}, 'name': {'required':", "= None self.expiry = None self.thumbprint = None self.is_verified =", "to send to Azure. :param key_name: Required. The name of", "an unhealthy endpoint will be updated to healthy when IoT", "'name': {'required': True}, 'tier': {'readonly': True}, } _attribute_map = {", ":param tags: A set of tags. Twin Tags. :type tags:", "request. :ivar minimum: The minimum number of units. :vartype minimum:", "| delete}. 
:vartype name: str :param display: The object that", "source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param condition: The condition that is", "using the connectionString specified. :type container_name: str \"\"\" _validation =", ":vartype certificate: str \"\"\" _validation = { 'subject': {'readonly': True},", "'enrichments', 'type': '[EnrichmentProperties]'}, } def __init__( self, **kwargs ): super(RoutingProperties,", "Resource tags. :type tags: dict[str, str] \"\"\" _attribute_map = {", "kwargs['source'] self.condition = kwargs.get('condition', None) self.endpoint_names = kwargs['endpoint_names'] self.is_enabled =", "'[SharedAccessSignatureAuthorizationRule]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "\"\"\"Routes that matched. :param properties: Properties of routes that matched.", "self).__init__(**kwargs) self.certificate = kwargs.get('certificate', None) class CertificateDescription(msrest.serialization.Model): \"\"\"The X509 Certificate.", "'type': 'IotHubSkuInfo'}, } def __init__( self, **kwargs ): super(IotHubDescription, self).__init__(**kwargs)", "'batch_frequency_in_seconds': {'key': 'batchFrequencyInSeconds', 'type': 'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type': 'int'},", "errors and monitor issues with endpoints. The 'unknown' status shows", "total count of devices in the identity registry. :vartype total_device_count:", "uses to route messages to endpoints. All required parameters must", "may cause incorrect behavior and will be lost if the", "= kwargs.get('routing_source', None) self.message = kwargs.get('message', None) self.twin = kwargs.get('twin',", "will be ignored when sending a request. :param value: List", "rule. Possible values include: \"Accept\", \"Reject\". 
:type action: str or", "= kwargs.get('max_delivery_count', None) self.default_ttl_as_iso8601 = kwargs.get('default_ttl_as_iso8601', None) self.feedback = kwargs.get('feedback',", "= kwargs.get('line', None) self.column = kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range", "rights: Required. The permissions assigned to the shared access policy.", "self).__init__(**kwargs) self.minimum = None self.maximum = None self.default = None", "info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation = { 'id': {'readonly':", "super(RoutingProperties, self).__init__(**kwargs) self.endpoints = kwargs.get('endpoints', None) self.routes = kwargs.get('routes', None)", "properties of a routing rule that your IoT hub uses", "self.capacity = kwargs['capacity'] class IotHubSkuDescriptionListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of IotHubSkuDescription", "self, **kwargs ): super(JobResponseListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "container URI. :type export_blob_container_uri: str :param exclude_keys: Required. The value", "self.properties = kwargs.get('properties', None) self.id = None self.name = None", ":param connection_string: Required. The connection string of the service bus", "name. :type name: str :param subscription_id: The subscription identifier of", "to deliver a message on the feedback queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-", "accepting messages as expected. The 'unhealthy' status shows that the", "the project root for license information. 
# Code generated by", "meet any of the conditions specified in the 'routes' section", "self.name = kwargs['name'] self.subscription_id = kwargs.get('subscription_id', None) self.resource_group = kwargs.get('resource_group',", "**kwargs ): super(TestRouteResult, self).__init__(**kwargs) self.result = kwargs.get('result', None) self.details =", "the routing rules. :type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties] :param service_bus_topics: The list", "ignored when sending a request. :ivar code: The error code.", "ServiceConnect\", \"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead,", "~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation = { 'name': {'readonly': True}, } _attribute_map", "~azure.mgmt.iothub.v2019_11_04.models.RouteErrorSeverity :param location: Location where the route error happened. :type", "job_id: The job identifier. :vartype job_id: str :ivar start_time_utc: The", "fileNotifications, $default. Endpoint names must be unique across endpoint types.", "at which blobs are written to storage. Value should be", ":type name: str \"\"\" _validation = { 'name': {'required': True},", "is allowed across all endpoint types for free hubs. :param", "parameter. When this property is not set, the messages which", ":param service_bus_queues: The list of Service Bus queue endpoints that", "'int'}, 'max_chunk_size_in_bytes': {'key': 'maxChunkSizeInBytes', 'type': 'int'}, 'encoding': {'key': 'encoding', 'type':", "leaf certificate .cer file or just .pem file content. 
:type", "'OperationDisplay'}, } def __init__( self, **kwargs ): super(Operation, self).__init__(**kwargs) self.name", "'[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "{ 'properties': {'key': 'properties', 'type': 'CertificateProperties'}, 'id': {'key': 'id', 'type':", "self, **kwargs ): super(CertificateDescription, self).__init__(**kwargs) self.properties = kwargs.get('properties', None) self.id", "} def __init__( self, **kwargs ): super(RoutingTwin, self).__init__(**kwargs) self.tags =", ":type compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map = { 'compilation_errors': {'key': 'compilationErrors',", "def __init__( self, **kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties = kwargs.get('properties',", "The name of storage container in the storage account. :type", "self.enabled_device_count = None self.disabled_device_count = None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error", "**kwargs ): super(Resource, self).__init__(**kwargs) self.id = None self.name = None", "the Event Hub-compatible endpoint. :vartype partition_ids: list[str] :ivar path: The", "status shows that the endpoint is not accepting messages, after", "~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation = { 'resource_type': {'readonly': True}, 'sku': {'required':", "evaluated to apply the routing rule. 
If no condition is", "self.name = kwargs['name'] self.tier = None self.capacity = kwargs.get('capacity', None)", "): super(IotHubLocationDescription, self).__init__(**kwargs) self.location = kwargs.get('location', None) self.role = kwargs.get('role',", "\"RegistryRead, DeviceConnect\", \"RegistryWrite, ServiceConnect\", \"RegistryWrite, DeviceConnect\", \"ServiceConnect, DeviceConnect\", \"RegistryRead, RegistryWrite,", "endpoint_names: list[str] \"\"\" _validation = { 'key': {'required': True}, 'value':", "kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input parameter. This is", "IoT Hub operations. It contains a list of operations and", "an X509 CA Certificate including the challenge nonce issued for", "type of the scaling enabled. Possible values include: \"Automatic\", \"Manual\",", "condition that is evaluated to apply the routing rule. If", "None) class UserSubscriptionQuotaListResult(msrest.serialization.Model): \"\"\"Json-serialized array of User subscription quota response.", "messages delivered to endpoints. All required parameters must be populated", "routing rules that the IoT hub uses to route messages", "maximum number of units. :vartype maximum: long :ivar default: The", "self.name = kwargs.get('name', None) self.source = kwargs['source'] self.condition = kwargs.get('condition',", "def __init__( self, **kwargs ): super(JobResponse, self).__init__(**kwargs) self.job_id = None", ":type sas_ttl_as_iso8601: ~datetime.timedelta :param connection_string: Required. The connection string for", "testing all routes. :param routing_source: Routing source. Possible values include:", "'id': {'readonly': True}, 'name': {'readonly': True}, 'etag': {'readonly': True}, 'type':", "'long'}, 'maximum': {'key': 'maximum', 'type': 'long'}, 'default': {'key': 'default', 'type':", "The number of partitions for receiving device-to-cloud messages in the", "the route error happened. 
:type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map =", "has established an eventually consistent state of health. The 'dead'", "of the Job Response object. Variables are only populated by", "self, **kwargs ): super(IotHubDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link", "which messages that satisfy the condition are routed. Currently only", "class FeedbackProperties(msrest.serialization.Model): \"\"\"The properties of the feedback queue for cloud-to-device", "= { 'properties': {'key': 'properties', 'type': 'RouteProperties'}, } def __init__(", "of route errors. :param start: Start where the route error", "and will be ignored when sending a request. :ivar total_device_count:", "The subscription identifier of the event hub endpoint. :type subscription_id:", "'str'}, 'event_hub_endpoints': {'key': 'eventHubEndpoints', 'type': '{EventHubProperties}'}, 'routing': {'key': 'routing', 'type':", "long :ivar disabled_device_count: The count of disabled devices in the", "the IP address range in CIDR notation for the rule.", "expiration date and time. :vartype expiry: ~datetime.datetime :ivar thumbprint: The", "a request. :ivar code: The error code. :vartype code: str", "object. Variables are only populated by the server, and will", "enrichment that your IoT hub applies to messages delivered to", "= None class IotHubQuotaMetricInfo(msrest.serialization.Model): \"\"\"Quota metrics properties. Variables are only", "{ 'lock_duration_as_iso8601': {'key': 'lockDurationAsIso8601', 'type': 'duration'}, 'ttl_as_iso8601': {'key': 'ttlAsIso8601', 'type':", "kwargs['key'] self.value = kwargs['value'] self.endpoint_names = kwargs['endpoint_names'] class ErrorDetails(msrest.serialization.Model): \"\"\"Error", "Twin desired properties. 
:type desired: object :param reported: Twin desired", "type: str or ~azure.mgmt.iothub.v2019_11_04.models.JobType :ivar status: The status of the", "class MessagingEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the messaging endpoints used by", "True}, 'failure_reason': {'readonly': True}, 'status_message': {'readonly': True}, 'parent_job_id': {'readonly': True},", "certificate's thumbprint. :vartype thumbprint: str :ivar is_verified: Determines whether certificate", "'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key':", "id: str :ivar name: The Event Hub-compatible consumer group name.", "self.next_link = None class StorageEndpointProperties(msrest.serialization.Model): \"\"\"The properties of the Azure", "'{MessagingEndpointProperties}'}, 'enable_file_upload_notifications': {'key': 'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device': {'key': 'cloudToDevice', 'type':", "hub. :type authorization_policies: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :param ip_filter_rules: The IP filter rules.", "The partition ids in the Event Hub-compatible endpoint. :vartype partition_ids:", ":type route: ~azure.mgmt.iothub.v2019_11_04.models.RouteProperties :param twin: Routing Twin Reference. :type twin:", "= None self.verification_code = None self.certificate = None class CertificateVerificationDescription(msrest.serialization.Model):", "is accepting messages as expected. 
The 'unhealthy' status shows that", "service_bus_topics: The list of Service Bus topic endpoints that the", "'IotHubSkuInfo'}, 'capacity': {'key': 'capacity', 'type': 'IotHubCapacity'}, } def __init__( self,", "X509 CA Certificate including the challenge nonce issued for the", "'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'}, } def __init__( self,", "True}, 'sku': {'required': True}, } _attribute_map = { 'id': {'key':", "= { 'subject': {'key': 'subject', 'type': 'str'}, 'expiry': {'key': 'expiry',", "'RouteErrorRange'}, } def __init__( self, **kwargs ): super(RouteCompilationError, self).__init__(**kwargs) self.message", "**kwargs ): super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "name of the resource group of the event hub endpoint.", "None) self.route = kwargs['route'] self.twin = kwargs.get('twin', None) class TestRouteResult(msrest.serialization.Model):", "The list of shared access policies. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.SharedAccessSignatureAuthorizationRule] :ivar", "capacity information. Variables are only populated by the server, and", "None self.created = None self.updated = None self.certificate = kwargs.get('certificate',", "= kwargs.get('location', None) class RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the route error", "location for iot hub. :vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation =", "as the actual queue name. :type name: str :param subscription_id:", "None self.scale_type = None class Resource(msrest.serialization.Model): \"\"\"The common properties of", "subject: str :ivar expiry: The certificate's expiration date and time.", "str :param role: The role of the region, can be", "\"\"\"The JSON-serialized array of IotHubQuotaMetricInfo objects with a next link.", "enabled. 
:type is_enabled: bool \"\"\" _validation = { 'name': {'required':", "kwargs.get('resource_group', None) class RoutingServiceBusTopicEndpointProperties(msrest.serialization.Model): \"\"\"The properties related to service bus", "{'readonly': True}, 'operation': {'readonly': True}, 'description': {'readonly': True}, } _attribute_map", "= { 'source': {'required': True}, 'endpoint_names': {'required': True, 'max_items': 1,", "<reponame>adewaleo/azure-sdk-for-python # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation.", ":type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The list of Azure Storage", "True}, 'endpoint_names': {'required': True, 'min_items': 1}, } _attribute_map = {", "Hub instance. :param tags: A set of tags. Resource tags.", "RoutingMessage(msrest.serialization.Model): \"\"\"Routing message. :param body: Body of routing message. :type", "connection string of the service bus queue endpoint. :type connection_string:", "'desired', 'type': 'object'}, 'reported': {'key': 'reported', 'type': 'object'}, } def", "name: Required. The name that identifies this endpoint. The name", "that represents the operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation =", "send data to this endpoint. The status of an unhealthy", "list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription] :ivar next_link: The next link. :vartype next_link: str \"\"\"", "super(CertificatePropertiesWithNonce, self).__init__(**kwargs) self.subject = None self.expiry = None self.thumbprint =", "None class CertificateVerificationDescription(msrest.serialization.Model): \"\"\"The JSON-serialized leaf certificate. :param certificate: base-64", "resource type. :vartype type: str :ivar etag: The etag. :vartype", "request. :param value: The array of IotHubSkuDescription. 
:type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuDescription]", "values include: \"Invalid\", \"AlreadyExists\". :vartype reason: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubNameUnavailabilityReason :param", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud- to-device-messages. :type max_delivery_count: int \"\"\" _validation = {", "The retention time for device-to-cloud messages in days. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#device-to-cloud-messages.", "def __init__( self, **kwargs ): super(RegistryStatistics, self).__init__(**kwargs) self.total_device_count = None", "IoT hub uses to route messages to endpoints. All required", "CertificateProperties(msrest.serialization.Model): \"\"\"The description of an X509 CA Certificate. Variables are", "super(IotHubProperties, self).__init__(**kwargs) self.authorization_policies = kwargs.get('authorization_policies', None) self.ip_filter_rules = kwargs.get('ip_filter_rules', None)", "the IoT hub. Possible values include: \"None\", \"DeviceManagement\". :type features:", "{'key': 'failureReason', 'type': 'str'}, 'status_message': {'key': 'statusMessage', 'type': 'str'}, 'parent_job_id':", "__init__( self, **kwargs ): super(RouteCompilationError, self).__init__(**kwargs) self.message = kwargs.get('message', None)", "'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, 'resource_group': {'key': 'resourceGroup', 'type': 'str'},", ":vartype default: long :ivar scale_type: The type of the scaling", ":vartype updated: ~datetime.datetime :ivar verification_code: The certificate's verification code that", "str :ivar description: Description of the operation. :vartype description: str", "Required. The name that identifies this endpoint. The name can", "connection string of the event hub endpoint. 
:type connection_string: str", ":type max_delivery_count: int \"\"\" _validation = { 'max_delivery_count': {'maximum': 100,", "self).__init__(**kwargs) self.lock_duration_as_iso8601 = kwargs.get('lock_duration_as_iso8601', None) self.ttl_as_iso8601 = kwargs.get('ttl_as_iso8601', None) self.max_delivery_count", "resource: str :ivar operation: Name of the operation. :vartype operation:", "{'key': 'enableFileUploadNotifications', 'type': 'bool'}, 'cloud_to_device': {'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments':", "cause incorrect behavior and will be lost if the code", "'tags': {'key': 'tags', 'type': '{str}'}, } def __init__( self, **kwargs", "The export blob container URI. :type export_blob_container_uri: str :param exclude_keys:", "secondary. The primary region is where the IoT hub is", "severity: Severity of the route error. Possible values include: \"error\",", ":vartype provisioning_state: str :ivar state: The hub state. :vartype state:", "'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key':", "{'key': 'cloudToDevice', 'type': 'CloudToDeviceProperties'}, 'comments': {'key': 'comments', 'type': 'str'}, 'features':", "to deliver a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int \"\"\"", "'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def", "deliver a message. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-file-upload. :type max_delivery_count: int \"\"\" _validation", "None self.type = None self.status = None self.failure_reason = None", "status_message: str :ivar parent_job_id: The job identifier of the parent", "'isEnabled', 'type': 'bool'}, } def __init__( self, **kwargs ): super(RouteProperties,", "properties of the Azure Storage endpoint for file upload. 
All", ":type column: int \"\"\" _attribute_map = { 'line': {'key': 'line',", "bus topic endpoint. :type connection_string: str :param name: Required. The", "user-provided enrichments that the IoT hub applies to messages to", "the Event Hub-compatible endpoint. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide- messaging#device-to-cloud-messages. :type partition_count: int", "the actual topic name. :type name: str :param subscription_id: The", "str :param resource_group: The name of the resource group of", "): super(UserSubscriptionQuotaListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None", "super(RoutingStorageContainerProperties, self).__init__(**kwargs) self.connection_string = kwargs['connection_string'] self.name = kwargs['name'] self.subscription_id =", "to Azure. :param sas_ttl_as_iso8601: The period of time for which", "'certificate': {'key': 'certificate', 'type': 'str'}, } def __init__( self, **kwargs", "count of disabled devices in the identity registry. :vartype disabled_device_count:", "of an IoT hub shared access policy. All required parameters", "be the same as the actual topic name. :type name:", "'type': 'rfc-1123'}, 'end_time_utc': {'key': 'endTimeUtc', 'type': 'rfc-1123'}, 'type': {'key': 'type',", "when sending a request. :ivar value: List of IoT Hub", "Currently only 1 endpoint is allowed. :type endpoint_names: list[str] :param", "__init__( self, **kwargs ): super(EventHubConsumerGroupInfo, self).__init__(**kwargs) self.properties = kwargs.get('properties', None)", "default. For grammar, see: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-query-language. :type condition: str :param endpoint_names:", "bytes for each blob written to storage. Value should be", "account and that MUST have its key as $default. Specifying", ":vartype next_link: str \"\"\" _validation = { 'next_link': {'readonly': True},", "enabled. 
:type enable_file_upload_notifications: bool :param cloud_to_device: The IoT hub cloud-to-device", "self).__init__(**kwargs) self.routes = kwargs.get('routes', None) class TestRouteInput(msrest.serialization.Model): \"\"\"Input for testing", "'value', 'type': '[EndpointHealthData]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def", "'default', 'type': 'long'}, 'scale_type': {'key': 'scaleType', 'type': 'str'}, } def", "secondary_key: The secondary key. :type secondary_key: str :param rights: Required.", "route error happened. :type location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map = {", "__init__( self, **kwargs ): super(EnrichmentProperties, self).__init__(**kwargs) self.key = kwargs['key'] self.value", "iot hub. :vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation = { 'provisioning_state':", "self.details = kwargs.get('details', None) class TestRouteResultDetails(msrest.serialization.Model): \"\"\"Detailed result of testing", "value: The array of quota metrics objects. :type value: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubQuotaMetricInfo]", "} _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'source':", "for an endpoint. :param endpoint_id: Id of the endpoint. :type", "be used for proof of possession. :vartype verification_code: str :ivar", "built-in Event Hubs endpoint. :type event_hubs: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingEventHubProperties] :param storage_containers: The", "action: str or ~azure.mgmt.iothub.v2019_11_04.models.IpFilterActionType :param ip_mask: Required. A string that", "The name can only include alphanumeric characters, periods, underscores, hyphens,", "**kwargs ): super(IotHubQuotaMetricInfoListResult, self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link =", "a request. :param value: The array of IotHubSkuDescription. 
:type value:", "\"\"\"The properties of the messaging endpoints used by this IoT", "True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly': True}, } _attribute_map =", "_attribute_map = { 'line': {'key': 'line', 'type': 'int'}, 'column': {'key':", "): super(ExportDevicesRequest, self).__init__(**kwargs) self.export_blob_container_uri = kwargs['export_blob_container_uri'] self.exclude_keys = kwargs['exclude_keys'] class", "the identity registry. :vartype disabled_device_count: long \"\"\" _validation = {", "'max_items': 1, 'min_items': 1}, 'is_enabled': {'required': True}, } _attribute_map =", "{ 'export_blob_container_uri': {'required': True}, 'exclude_keys': {'required': True}, } _attribute_map =", "str :ivar type: The resource type. :vartype type: str \"\"\"", ":param max_chunk_size_in_bytes: Maximum number of bytes for each blob written", "{'required': True}, } _attribute_map = { 'failover_region': {'key': 'failoverRegion', 'type':", "delivered to built-in and custom endpoints. See: https://aka.ms/telemetryoneventgrid. :type enrichments:", "'int'}, 'name': {'key': 'name', 'type': 'Name'}, } def __init__( self,", "'type': 'str'}, 'ip_mask': {'key': 'ipMask', 'type': 'str'}, } def __init__(", "https://docs.microsoft.com/azure/iot- hub/iot-hub-devguide-file-upload. :type lock_duration_as_iso8601: ~datetime.timedelta :param ttl_as_iso8601: The period of", "Not specifying a value for this property when the enableFileUploadNotifications", "= { 'message': {'key': 'message', 'type': 'str'}, 'severity': {'key': 'severity',", "type. :vartype type: str :param location: Required. The resource location.", "IoT hub to check. :type name: str \"\"\" _validation =", "self).__init__(**kwargs) self.service_bus_queues = kwargs.get('service_bus_queues', None) self.service_bus_topics = kwargs.get('service_bus_topics', None) self.event_hubs", "Corporation. All rights reserved. # Licensed under the MIT License.", "be excluded during export. 
:type exclude_keys: bool \"\"\" _validation =", "bool \"\"\" _validation = { 'name': {'required': True, 'pattern': r'^[A-Za-z0-9-._]{1,64}$'},", "= None self.is_verified = None self.created = None self.updated =", "project root for license information. # Code generated by Microsoft", "'duration'}, 'connection_string': {'key': 'connectionString', 'type': 'str'}, 'container_name': {'key': 'containerName', 'type':", "): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User", "'type': 'object'}, 'properties': {'key': 'properties', 'type': 'RoutingTwinProperties'}, } def __init__(", "file upload notification queue. :type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties] :param enable_file_upload_notifications:", "to the message. :type endpoint_names: list[str] \"\"\" _validation = {", "of results. Variables are only populated by the server, and", "representation of X509 certificate .cer file or just .pem file", "for the file upload notification queue. :type messaging_endpoints: dict[str, ~azure.mgmt.iothub.v2019_11_04.models.MessagingEndpointProperties]", "\"\"\" _attribute_map = { 'properties': {'key': 'properties', 'type': 'RouteProperties'}, }", "'disabled_device_count': {'readonly': True}, } _attribute_map = { 'total_device_count': {'key': 'totalDeviceCount',", ":param value: JSON-serialized array of Endpoint health data. :type value:", "\"\"\" _attribute_map = { 'routing_source': {'key': 'routingSource', 'type': 'str'}, 'message':", "'type': 'str'}, } def __init__( self, **kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs)", "kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota response. :param id:", "\"\"\"User subscription quota response. :param id: IotHub type id. :type", "route. 
Possible values include: \"undefined\", \"false\", \"true\". :type result: str", "'type': 'str'}, } def __init__( self, **kwargs ): super(OperationDisplay, self).__init__(**kwargs)", ":type localized_value: str \"\"\" _attribute_map = { 'value': {'key': 'value',", "location: ~azure.mgmt.iothub.v2019_11_04.models.RouteErrorRange \"\"\" _attribute_map = { 'message': {'key': 'message', 'type':", "= kwargs.get('current_value', None) self.limit = kwargs.get('limit', None) self.name = kwargs.get('name',", ":param authorization_policies: The shared access policies you can use to", "kwargs.get('column', None) class RouteErrorRange(msrest.serialization.Model): \"\"\"Range of route errors. :param start:", "'type': 'FallbackRouteProperties'}, 'enrichments': {'key': 'enrichments', 'type': '[EnrichmentProperties]'}, } def __init__(", "when sending a request. :param value: The array of IotHubDescription", "compilation_errors: list[~azure.mgmt.iothub.v2019_11_04.models.RouteCompilationError] \"\"\" _attribute_map = { 'compilation_errors': {'key': 'compilationErrors', 'type':", "def __init__( self, **kwargs ): super(TestRouteInput, self).__init__(**kwargs) self.message = kwargs.get('message',", "Required. The value for the enrichment property. :type value: str", "the route. The name can only include alphanumeric characters, periods,", "int :param encoding: Encoding that is used to serialize messages", "shared access policies with a next link. Variables are only", "kwargs.get('display', None) class OperationDisplay(msrest.serialization.Model): \"\"\"The object that represents the operation.", "None self.start_time_utc = None self.end_time_utc = None self.type = None", "'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers': {'key': 'storageContainers', 'type': '[RoutingStorageContainerProperties]'}, } def", "are mandatory but can be reordered. 
:type file_name_format: str :param", "{'readonly': True}, 'is_verified': {'readonly': True}, 'created': {'readonly': True}, 'updated': {'readonly':", "event hub endpoint. :type subscription_id: str :param resource_group: The name", "): super(RoutingMessage, self).__init__(**kwargs) self.body = kwargs.get('body', None) self.app_properties = kwargs.get('app_properties',", "each blob written to storage. Value should be between 10485760(10MB)", "{'key': 'authorizationPolicies', 'type': '[SharedAccessSignatureAuthorizationRule]'}, 'ip_filter_rules': {'key': 'ipFilterRules', 'type': '[IpFilterRule]'}, 'provisioning_state':", "~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _validation = { 'route': {'required': True}, } _attribute_map", "= kwargs.get('messaging_endpoints', None) self.enable_file_upload_notifications = kwargs.get('enable_file_upload_notifications', None) self.cloud_to_device = kwargs.get('cloud_to_device',", "'healthStatus', 'type': 'str'}, } def __init__( self, **kwargs ): super(EndpointHealthData,", "True, 'pattern': r'^(?![0-9]+$)(?!-)[a-zA-Z0-9-]{2,49}[a-zA-Z0-9]$'}, 'type': {'readonly': True}, 'location': {'required': True}, }", "See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging. :type routing: ~azure.mgmt.iothub.v2019_11_04.models.RoutingProperties :param storage_endpoints: The list of", "True}, 'reason': {'readonly': True}, } _attribute_map = { 'name_available': {'key':", "} def __init__( self, **kwargs ): super(TagsResource, self).__init__(**kwargs) self.tags =", "The resource tags. :type tags: dict[str, str] \"\"\" _validation =", "{ 'resource_type': {'key': 'resourceType', 'type': 'str'}, 'sku': {'key': 'sku', 'type':", "condition are routed. Currently only one endpoint is allowed. :type", "'scale_type': {'readonly': True}, } _attribute_map = { 'minimum': {'key': 'minimum',", "will be ignored when sending a request. :ivar name: Operation", "\"\"\"The properties related to an event hub endpoint. 
All required", "{ 'name': {'required': True}, 'tier': {'readonly': True}, } _attribute_map =", "True}, } _attribute_map = { 'resource_type': {'key': 'resourceType', 'type': 'str'},", ":vartype status_message: str :ivar parent_job_id: The job identifier of the", ":ivar type: The resource type. :vartype type: str :param location:", "self).__init__(**kwargs) self.total_device_count = None self.enabled_device_count = None self.disabled_device_count = None", "populated in order to send to Azure. :param failover_region: Required.", "provisioning state. :vartype provisioning_state: str :ivar state: The hub state.", "input_blob_container_uri: str :param output_blob_container_uri: Required. The output blob container URI.", "'str'}, } def __init__( self, **kwargs ): super(CertificateProperties, self).__init__(**kwargs) self.subject", "True}, } _attribute_map = { 'name_available': {'key': 'nameAvailable', 'type': 'bool'},", "'[str]'}, } def __init__( self, **kwargs ): super(EnrichmentProperties, self).__init__(**kwargs) self.key", "= kwargs['location'] self.tags = kwargs.get('tags', None) class IotHubDescription(Resource): \"\"\"The description", "content. :vartype certificate: str \"\"\" _validation = { 'subject': {'readonly':", "~azure.mgmt.iothub.v2019_11_04.models.RoutingTwin \"\"\" _attribute_map = { 'routing_source': {'key': 'routingSource', 'type': 'str'},", "batch_frequency_in_seconds: int :param max_chunk_size_in_bytes: Maximum number of bytes for each", "1, 'minimum': 1}, 'maximum': {'readonly': True}, 'default': {'readonly': True}, 'scale_type':", "'int'}, 'limit': {'key': 'limit', 'type': 'int'}, 'name': {'key': 'name', 'type':", "def __init__( self, **kwargs ): super(CertificateVerificationDescription, self).__init__(**kwargs) self.certificate = kwargs.get('certificate',", "License.txt in the project root for license information. 
# Code", "def __init__( self, **kwargs ): super(IotHubSkuDescriptionListResult, self).__init__(**kwargs) self.value = kwargs.get('value',", "The only possible keys to this dictionary is events. This", "'[IotHubDescription]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self,", "Generator. # Changes may cause incorrect behavior and will be", "self, **kwargs ): super(FailoverInput, self).__init__(**kwargs) self.failover_region = kwargs['failover_region'] class FallbackRouteProperties(msrest.serialization.Model):", "self).__init__(**kwargs) self.value = kwargs.get('value', None) self.next_link = None class StorageEndpointProperties(msrest.serialization.Model):", "'value': {'key': 'value', 'type': 'str'}, 'localized_value': {'key': 'localizedValue', 'type': 'str'},", "None) self.encoding = kwargs.get('encoding', None) class RoutingTwin(msrest.serialization.Model): \"\"\"Twin reference input", "long :ivar enabled_device_count: The count of enabled devices in the", "'maximum': {'readonly': True}, 'default': {'readonly': True}, 'scale_type': {'readonly': True}, }", ":type capacity: ~azure.mgmt.iothub.v2019_11_04.models.IotHubCapacity \"\"\" _validation = { 'resource_type': {'readonly': True},", "will be lost if the code is regenerated. # --------------------------------------------------------------------------", "routing_source: str or ~azure.mgmt.iothub.v2019_11_04.models.RoutingSource :param message: Routing message. :type message:", "None class RouteCompilationError(msrest.serialization.Model): \"\"\"Compilation error when evaluating route. :param message:", "the device queue. See: https://docs.microsoft.com/azure/iot-hub/iot-hub-devguide-messaging#cloud-to- device-messages. :type max_delivery_count: int :param", "'unknown' status shows that the IoT Hub has not established", "must be unique across endpoint types. 
:type name: str :param", "{'key': 'column', 'type': 'int'}, } def __init__( self, **kwargs ):", "more than one storage account causes an error to be", "**kwargs ): super(ErrorDetails, self).__init__(**kwargs) self.code = None self.http_status_code = None", "{'key': 'created', 'type': 'rfc-1123'}, 'updated': {'key': 'updated', 'type': 'rfc-1123'}, 'certificate':", "keys to this dictionary is events. This key has to", "of the job. :vartype start_time_utc: ~datetime.datetime :ivar end_time_utc: The time", "} _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'tier':", "IotHub SKU info. :type sku: ~azure.mgmt.iothub.v2019_11_04.models.IotHubSkuInfo \"\"\" _validation = {", "enabled. :type is_enabled: bool \"\"\" _validation = { 'source': {'required':", "{'readonly': True}, 'host_name': {'readonly': True}, 'locations': {'readonly': True}, } _attribute_map", "'str'}, 'endpoint_names': {'key': 'endpointNames', 'type': '[str]'}, } def __init__( self,", "TagsResource(msrest.serialization.Model): \"\"\"A container holding only the Tags for a resource,", "self.failure_reason = None self.status_message = None self.parent_job_id = None class", "super(IotHubSkuInfo, self).__init__(**kwargs) self.name = kwargs['name'] self.tier = None self.capacity =", "Hub-compatible name. :vartype path: str :ivar endpoint: The Event Hub-compatible", "RouteProperties(msrest.serialization.Model): \"\"\"The properties of a routing rule that your IoT", "messages to, based on the routing rules. :type service_bus_queues: list[~azure.mgmt.iothub.v2019_11_04.models.RoutingServiceBusQueueEndpointProperties]", "name that identifies this endpoint. The name can only include", "periods, underscores, hyphens, has a maximum length of 64 characters,", "RoutingEventHubProperties(msrest.serialization.Model): \"\"\"The properties related to an event hub endpoint. All", "conditions specified in the 'routes' section are met. 
This is", "'pattern': r'^[A-Za-z0-9-._]{1,64}$'}, 'container_name': {'required': True}, 'batch_frequency_in_seconds': {'maximum': 720, 'minimum': 60},", ":type ttl_as_iso8601: ~datetime.timedelta :param max_delivery_count: The number of times the", "all endpoint types for free hubs. :param service_bus_queues: The list", "on IotHub type. :type limit: int :param name: IotHub type.", "secondary location for iot hub. :vartype locations: list[~azure.mgmt.iothub.v2019_11_04.models.IotHubLocationDescription] \"\"\" _validation", "The hub state. :vartype state: str :ivar host_name: The name", "reserved. # Licensed under the MIT License. See License.txt in", "include: \"Automatic\", \"Manual\", \"None\". :vartype scale_type: str or ~azure.mgmt.iothub.v2019_11_04.models.IotHubScaleType \"\"\"", "UserSubscriptionQuota(msrest.serialization.Model): \"\"\"User subscription quota response. :param id: IotHub type id.", "self.name = None self.type = None self.location = kwargs['location'] self.tags", ":type desired: object :param reported: Twin desired properties. :type reported:", "are only populated by the server, and will be ignored", "\"\"\"The IoT hub cloud-to-device messaging properties. :param max_delivery_count: The max", "__init__( self, **kwargs ): super(TestAllRoutesResult, self).__init__(**kwargs) self.routes = kwargs.get('routes', None)", "class TestAllRoutesResult(msrest.serialization.Model): \"\"\"Result of testing all routes. :param routes: JSON-serialized", "def __init__( self, **kwargs ): super(MatchedRoute, self).__init__(**kwargs) self.properties = kwargs.get('properties',", "represents the operation. :type display: ~azure.mgmt.iothub.v2019_11_04.models.OperationDisplay \"\"\" _validation = {", "The value for the enrichment property. 
:type value: str :param", "**kwargs ): super(TestRouteResultDetails, self).__init__(**kwargs) self.compilation_errors = kwargs.get('compilation_errors', None) class UserSubscriptionQuota(msrest.serialization.Model):", "EventHubConsumerGroupsListResult(msrest.serialization.Model): \"\"\"The JSON-serialized array of Event Hub-compatible consumer group names", "'type': 'long'}, } def __init__( self, **kwargs ): super(IotHubQuotaMetricInfo, self).__init__(**kwargs)", "sending a request. :ivar total_device_count: The total count of devices", "hubs. :type endpoints: ~azure.mgmt.iothub.v2019_11_04.models.RoutingEndpoints :param routes: The list of user-provided", "'partitionCount', 'type': 'int'}, 'partition_ids': {'key': 'partitionIds', 'type': '[str]'}, 'path': {'key':", "be ignored when sending a request. :ivar job_id: The job", "name: Required. The name of the IoT hub to check.", "self.message = kwargs.get('message', None) self.route = kwargs['route'] self.twin = kwargs.get('twin',", "IP filter rules for the IoT hub. All required parameters", "A set of tags. The resource tags. :type tags: dict[str,", "ImportDevicesRequest(msrest.serialization.Model): \"\"\"Use to provide parameters when requesting an import of", "desired: Twin desired properties. :type desired: object :param reported: Twin", "{'readonly': True}, 'endpoint': {'readonly': True}, } _attribute_map = { 'retention_time_in_days':", "of IotHub type. :type current_value: int :param limit: Numerical limit", "_attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificateProperties'}, 'id': {'key':", ":ivar name: Operation name: {provider}/{resource}/{read | write | action |", "self.message = None self.details = None class EventHubConsumerGroupInfo(msrest.serialization.Model): \"\"\"The properties", "flow. 
Variables are only populated by the server, and will", "self.thumbprint = None self.is_verified = None self.created = None self.updated", "'type': {'readonly': True}, 'location': {'required': True}, 'sku': {'required': True}, }", "object that represents the operation. Variables are only populated by", "'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags',", "None self.description = None class OperationInputs(msrest.serialization.Model): \"\"\"Input values. All required", "a resource, allowing the user to update the tags on", "{'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type':", "'type': 'str'}, 'message': {'key': 'message', 'type': 'str'}, 'details': {'key': 'details',", "registry. :vartype total_device_count: long :ivar enabled_device_count: The count of enabled", ":ivar locations: Primary and secondary location for iot hub. :vartype", "The 'unknown' status shows that the IoT Hub has not", "\"\"\" _attribute_map = { 'tags': {'key': 'tags', 'type': 'object'}, 'properties':", "{'key': 'serviceBusTopics', 'type': '[RoutingServiceBusTopicEndpointProperties]'}, 'event_hubs': {'key': 'eventHubs', 'type': '[RoutingEventHubProperties]'}, 'storage_containers':", "storage container endpoints that IoT hub routes messages to, based", "'long'}, } def __init__( self, **kwargs ): super(IotHubSkuInfo, self).__init__(**kwargs) self.name", "RouteErrorPosition(msrest.serialization.Model): \"\"\"Position where the route error happened. :param line: Line", ":ivar state: The hub state. :vartype state: str :ivar host_name:", "**kwargs ): super(RoutingMessage, self).__init__(**kwargs) self.body = kwargs.get('body', None) self.app_properties =", "for testing all routes. :param routing_source: Routing source. Possible values", "name: str :ivar type: the resource type. :vartype type: str", "the identity registry. 
:vartype total_device_count: long :ivar enabled_device_count: The count", "\"RegistryRead, RegistryWrite, ServiceConnect\", \"RegistryRead, RegistryWrite, DeviceConnect\", \"RegistryRead, ServiceConnect, DeviceConnect\", \"RegistryWrite,", "of endpoints to which messages that satisfy the condition are" ]
[ "('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions',", "'path': '/v1/ruleenforcements/%s' % (enforcement_model.id), 'method': 'GET' }, # Action Executions", "{ 'path': '/v1/packs/index/health', 'method': 'GET' }, # Pack views {", "'/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method':", "'path': '/v1/executions/%s/attribute/trigger_instance' % (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/children'", "'/v1/webhooks/git', 'method': 'GET' }, # RBAC - roles { 'path':", "'path': '/v1/rbac/role_assignments', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/role_assignments/%s'", "}, # Pack config schemas { 'path': '/v1/config_schemas', 'method': 'GET',", "Actions { 'path': '/v1/actions', 'method': 'GET', 'is_getall': True }, {", "'method': 'POST', # re-run execution 'payload': {'parameters': {}} }, #", "aliases { 'path': '/v1/actionalias', 'method': 'GET', 'is_getall': True }, {", "'/v1/actions', 'method': 'POST', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method':", "'/v1/runnertypes/test-runner-1', 'method': 'GET' }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload':", "'POST', 'payload': {'command': 'test command string'} }, # Rules {", "'payload': {'enabled': False} }, # Actions { 'path': '/v1/actions', 'method':", "}, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'GET' }, { 'path': '/v1/actionalias',", "2.0 # (the \"License\"); you may not use this file", "'is_getall': True }, # Rule views { 'path': '/v1/rules/views', 'method':", "UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): # alias_model =", 
"'create_trigger_type_db', mock.MagicMock()): from st2api.controllers.v1.webhooks import HooksHolder from st2common.persistence.rbac import UserRoleAssignment", "% (rule_model.ref), 'method': 'PUT', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s'", "endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code, http_client.OK)", "import OrderedDict import six import mock from st2common.services import triggers", "'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1", "'cloud'} }, { 'path': '/v1/packs/index/health', 'method': 'GET' }, # Pack", "- permission types { 'path': '/v1/rbac/permission_types', 'method': 'GET', 'is_getall': True", "# re-run execution 'payload': {'parameters': {}} }, # Action execution", "'channel', 'source_channel': 'bar'} }, # Webhook { 'path': '/v1/webhooks/st2', 'method':", "'foo': 'bar' } }, # Sensors { 'path': '/v1/sensortypes', 'method':", "denied. 
\"\"\" register_packs = True fixtures_loader = FixturesLoader() coordinator =", "def _perform_request_for_endpoint(self, endpoint): if endpoint['method'] == 'GET': response = self.app.get(endpoint['path'],", "} MOCK_RULE_1 = { 'enabled': True, 'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh',", "'description': '' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class which hits", "True }, { 'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method': 'GET' },", "supported_endpoints: response = self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s \"%s\" didn\\'t return", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False) # Register mock service in the", "'method': 'GET', 'is_getall': True }, # Rule views { 'path':", "one, edit and delete operations self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model", "'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE' }, # Action aliases { 'path':", "import UserRoleAssignmentDB from st2common.service_setup import register_service_in_service_registry from st2common.services import coordination", "'DELETE': return self.app.delete(endpoint['path'], expect_errors=True) else: raise ValueError('Unsupported method: %s' %", "hits all the API endpoints which are behind the RBAC", "# stop execution }, { 'path': '/v1/executions/%s/re_run' % (execution_model.id), 'method':", "} } MOCK_ACTION_ALIAS_1 = { 'name': 'alias3', 'pack': 'aliases', 'description':", "{ 'name': 'alias3', 'pack': 'aliases', 'description': 'test description', 'action_ref': 'core.local',", "not endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code,", "{ 'name': 'test-runner-1', 'description': 'test', 'enabled': 
False } MOCK_ACTION_1 =", "the StackStorm, Inc ('StackStorm') under one or more # contributor", "governing permissions and # limitations under the License. from collections", "DUMMY_TRIGGER_DICT http_client = six.moves.http_client __all__ = [ 'APIControllersRBACTestCase' ] FIXTURES_PACK", "from st2api.controllers.v1.webhooks import HooksHolder from st2common.persistence.rbac import UserRoleAssignment from st2common.models.db.rbac", "all the API endpoints which are behind the RBAC wall", "config as tests_config from st2tests.fixturesloader import FixturesLoader from open_rbac.tests import", "{'type': 'string', 'default': 'C1', 'position': 0}, 'd': {'type': 'string', 'default':", "Sensors { 'path': '/v1/sensortypes', 'method': 'GET', 'is_getall': True }, {", "'/v1/actionalias/match', 'method': 'POST', 'payload': {'command': 'test command string'} }, #", "ponies', 'user': 'channel', 'source_channel': 'bar'} }, # Webhook { 'path':", "permissions and # limitations under the License. from collections import", "return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'DELETE': return self.app.delete(endpoint['path'],", "tests_config from st2tests.fixturesloader import FixturesLoader from open_rbac.tests import APIControllerWithRBACTestCase from", "{ 'path': '/v1/sensortypes', 'method': 'GET', 'is_getall': True }, { 'path':", "\"\"\" Test class which hits all the API endpoints which", "'/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'PUT', 'payload': {'enabled': False} }, #", "# Timers { 'path': '/v1/timers', 'method': 'GET' }, { 'path':", "Also test ?limit=-1 - admin user self.use_user(self.users['admin']) for endpoint in", "language governing permissions and # limitations under the License. 
from", "'payload': { 'trigger': 'some', 'payload': { 'some': 'thing' } }", "that access to icon.png file doesn't require any permissions response", "StackStorm, Inc ('StackStorm') under one or more # contributor license", "'/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall': True } ] self.use_user(self.users['no_permissions']) for endpoint", "{ 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'DELETE' }, # Rule", "{'type': 'string', 'default': 'D1', 'immutable': True} } } MOCK_ACTION_ALIAS_1 =", "# Also test ?limit=-1 - admin user self.use_user(self.users['admin']) for endpoint", "= { 'name': 'ma.dummy.action', 'pack': 'examples', 'description': 'test description', 'enabled':", "{'packs': 'libcloud'} }, { 'path': '/v1/packs/uninstall', 'method': 'POST', 'payload': {'packs':", "RBAC wall with a user which has no permissions and", "RBAC - permission types { 'path': '/v1/rbac/permission_types', 'method': 'GET', 'is_getall':", "'examples', 'description': 'test description', 'enabled': True, 'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script',", "'GET' }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'PUT', 'payload':", "'PUT', 'payload': {'enabled': False} }, # Actions { 'path': '/v1/actions',", "% (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/children' % (execution_model.id),", "'is_getall': True }, { 'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall': True", "expect_errors=True) elif endpoint['method'] == 'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif", "'ip1': '{{trigger.t1_p}}' } }, 'description': '' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\"", "['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces',", "# Rule 
enforcements { 'path': '/v1/ruleenforcements', 'method': 'GET', 'is_getall': True", "endpoint['method'] == 'GET': response = self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method'] ==", "'path': '/v1/rules', 'method': 'POST', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s'", "= self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint): if endpoint['method']", "response = self.app.get(endpoint['path'] + '?limit=-1', expect_errors=True) msg = '%s \"%s\"", "}, { 'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method': 'GET' }, #", "not endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] + '?limit=-1', expect_errors=True)", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "(execution_model.id), 'method': 'DELETE' # stop execution }, { 'path': '/v1/executions/%s/re_run'", "('traces', ['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1 = { 'name': 'test-runner-1', 'description': 'test',", "'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET'", "'core.local'} # schedule execution / run action }, { 'path':", "(self.role_assignment_db_model['id']), 'method': 'GET' }, # RBAC - permission types {", "register_packs = True fixtures_loader = FixturesLoader() coordinator = None @classmethod", "+ '?limit=-1', expect_errors=True) msg = '%s \"%s\" didn\\'t return 403", "st2tests.fixturesloader import FixturesLoader from open_rbac.tests import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'GET'", "st2tests import config as tests_config from st2tests.fixturesloader import FixturesLoader from", "= 
self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml'] enforcement_model =", "Packs { 'path': '/v1/packs', 'method': 'GET', 'is_getall': True }, {", "}, { 'path': '/v1/packs/index/search', 'method': 'POST', 'payload': {'query': 'cloud'} },", "'method': 'GET' }, { 'path': '/v1/executions', 'method': 'POST', 'payload': {'action':", "under the License is distributed on an \"AS IS\" BASIS,", "{ 'trigger.k1': { 'pattern': 't1_p_v', 'type': 'equals' } }, 'action':", "License for the specific language governing permissions and # limitations", "'GET' }, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' }, # Pack", "{ 'some': 'thing' } } }, # Traces { 'path':", "distributed with # this work for additional information regarding copyright", "forbidden response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint):", "'GET', 'is_getall': True }, { 'path': '/v1/ruleenforcements/%s' % (enforcement_model.id), 'method':", "Timers { 'path': '/v1/timers', 'method': 'GET' }, { 'path': '/v1/timers/%s'", "information regarding copyright ownership. 
# The ASF licenses this file", "Action aliases { 'path': '/v1/actionalias', 'method': 'GET', 'is_getall': True },", "self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint): if endpoint['method'] ==", "'path': '/v1/aliasexecution', 'method': 'POST', 'payload': {'name': 'alias1', 'format': 'foo bar", "== 'DELETE': return self.app.delete(endpoint['path'], expect_errors=True) else: raise ValueError('Unsupported method: %s'", "{ 'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET' }, # Pack config schemas", "coordination.get_coordinator(use_cache=False) # Register mock service in the service registry for", "'immutable': True} } } MOCK_ACTION_ALIAS_1 = { 'name': 'alias3', 'pack':", "- admin user self.use_user(self.users['admin']) for endpoint in supported_endpoints: if not", "Rule enforcements { 'path': '/v1/ruleenforcements', 'method': 'GET', 'is_getall': True },", "'/v1/executions/%s' % (execution_model.id), 'method': 'DELETE' # stop execution }, {", "'path': '/v1/actionalias/match', 'method': 'POST', 'payload': {'command': 'test command string'} },", "# Register packs if self.register_packs: self._register_packs() # Insert mock objects", "return self.app.delete(endpoint['path'], expect_errors=True) else: raise ValueError('Unsupported method: %s' % (endpoint['method']))", "return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'PUT': return self.app.put_json(endpoint['path'],", "self._register_packs() # Insert mock objects - those objects are used", "access denied. 
\"\"\" register_packs = True fixtures_loader = FixturesLoader() coordinator", "(sensor_model.ref), 'method': 'PUT', 'payload': {'enabled': False} }, # Actions {", "description', 'enabled': True, 'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters': { 'c':", "'method': 'GET' }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'PUT',", "'C1', 'position': 0}, 'd': {'type': 'string', 'default': 'D1', 'immutable': True}", "'POST', 'payload': {'types': ['actions']} }, { 'path': '/v1/packs/index/search', 'method': 'POST',", "'GET' }, # Pack views { 'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET'", "command string'} }, # Rules { 'path': '/v1/rules', 'method': 'GET',", "'method': 'GET' }, { 'path': '/v1/rules', 'method': 'POST', 'payload': MOCK_RULE_1", "}, # Rule enforcements { 'path': '/v1/ruleenforcements', 'method': 'GET', 'is_getall':", "'is_getall': True }, { 'path': '/v1/ruleenforcements/%s' % (enforcement_model.id), 'method': 'GET'", "{ 'path': '/v1/rbac/permission_types/action', 'method': 'GET' }, # Action views {", "'GET', 'is_getall': True }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method':", "'path': '/v1/webhooks/st2', 'method': 'POST', 'payload': { 'trigger': 'some', 'payload': {", "'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']),", "{ 'enabled': True, 'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger': { 'type':", "'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/uninstall', 'method': 'POST', 'payload':", "(rule_model.ref), 'method': 'GET' }, { 'path': '/v1/rules', 'method': 'POST', 'payload':", "= FixturesLoader() coordinator = None @classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase,", "'ma.dummy.action', 'pack': 'examples', 'description': 'test description', 'enabled': True, 'entry_point': 
'/tmp/test/action2.py',", "{ 'type': 'wolfpack.triggertype-1' }, 'criteria': { 'trigger.k1': { 'pattern': 't1_p_v',", "{ 'path': '/v1/packs/register', 'method': 'POST', 'payload': {'types': ['actions']} }, {", "'path': '/v1/timers/%s' % (timer_model.id), 'method': 'GET' }, # Webhooks {", "response = self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s \"%s\" didn\\'t return 403", "'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml',", "ownership. # The ASF licenses this file to You under", "}, # Sensors { 'path': '/v1/sensortypes', 'method': 'GET', 'is_getall': True", "# Test that access to icon.png file doesn't require any", "self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml'] trace_model = self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml']", "software # distributed under the License is distributed on an", "'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger': { 'type': 'wolfpack.triggertype-1' }, 'criteria':", "'is_getall': True } ] self.use_user(self.users['no_permissions']) for endpoint in supported_endpoints: response", "code (body=%s)' % (endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) #", "with # this work for additional information regarding copyright ownership.", "self.models['triggers']['cron1.yaml'] supported_endpoints = [ # Runners { 'path': '/v1/runnertypes', 'method':", "'path': '/v1/rules/views', 'method': 'GET', 'is_getall': True }, # Service registry", "}, # Pack views { 'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET' },", "'bar' } }, # Sensors { 'path': '/v1/sensortypes', 'method': 'GET',", "MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'DELETE' },", "http_client.OK) # Other files should return forbidden 
response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml',", "'test-runner-1', 'description': 'test', 'enabled': False } MOCK_ACTION_1 = { 'name':", "{ 'path': '/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload': { 'foo': 'bar' }", "'method': 'POST', 'payload': {'command': 'test command string'} }, # Rules", "role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): #", "compliance with # the License. You may obtain a copy", "}, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE' }, # Action aliases", "licenses this file to You under the Apache License, Version", "{ 'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall': True } ] self.use_user(self.users['no_permissions'])", "from st2tests import config as tests_config from st2tests.fixturesloader import FixturesLoader", "'method': 'GET' }, { 'path': '/v1/actions', 'method': 'POST', 'payload': MOCK_ACTION_1", "{ 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'GET' }, { 'path':", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/rules/%s' % (rule_model.ref),", "self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method'] == 'POST': return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True)", "Licensed to the StackStorm, Inc ('StackStorm') under one or more", "'/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters': { 'c': {'type': 'string', 'default': 'C1',", "{ 'path': '/v1/webhooks/git', 'method': 'GET' }, # RBAC - roles", "class which hits all the API endpoints which are behind", "{ 'foo': 'bar' } }, # Sensors { 'path': '/v1/sensortypes',", "}, { 'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml',", "types { 
'path': '/v1/rbac/permission_types', 'method': 'GET', 'is_getall': True }, {", "}, # Rules { 'path': '/v1/rules', 'method': 'GET', 'is_getall': True", "'name': 'mock_service'}, start_heart=True) @classmethod def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def", "'/v1/sensortypes', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/sensortypes/%s' %", "'path': '/v1/executions', 'method': 'POST', 'payload': {'action': 'core.local'} # schedule execution", "'GET' }, { 'path': '/v1/webhooks/git', 'method': 'GET' }, # RBAC", "- those objects are used to test get one, edit", "Webhooks { 'path': '/v1/webhooks', 'method': 'GET' }, { 'path': '/v1/webhooks/git',", "'is_getall': True }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'GET' }, {", "% (rule_model.ref), 'method': 'GET' }, { 'path': '/v1/rules', 'method': 'POST',", "execution nested controllers { 'path': '/v1/executions/%s/attribute/trigger_instance' % (execution_model.id), 'method': 'GET'", "'value1', 'name': 'mock_service'}, start_heart=True) @classmethod def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator)", "Test that access to icon.png file doesn't require any permissions", "'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'DELETE': return", "'/v1/actionalias', 'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method':", "'path': '/v1/actionalias/aliases.alias1', 'method': 'GET' }, { 'path': '/v1/actionalias', 'method': 'POST',", "# The ASF licenses this file to You under the", "makes sure API returns access denied. 
\"\"\" register_packs = True", "'PUT', 'payload': { 'foo': 'bar' } }, # Sensors {", "endpoint): if endpoint['method'] == 'GET': response = self.app.get(endpoint['path'], expect_errors=True) elif", "mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()): from st2api.controllers.v1.webhooks import HooksHolder from st2common.persistence.rbac import", "% (timer_model.id), 'method': 'GET' }, # Webhooks { 'path': '/v1/webhooks',", "API returns access denied. \"\"\" register_packs = True fixtures_loader =", "= { 'enabled': True, 'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger': {", "endpoints which are behind the RBAC wall with a user", "}, { 'path': '/v1/packs/index/health', 'method': 'GET' }, # Pack views", "'PUT', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE' },", "'method': 'GET' }, # Pack configs { 'path': '/v1/configs', 'method':", "'user': 'channel', 'source_channel': 'bar'} }, # Webhook { 'path': '/v1/webhooks/st2',", "'equals' } }, 'action': { 'ref': 'sixpack.st2.test.action', 'parameters': { 'ip2':", "= self.models['triggers']['cron1.yaml'] supported_endpoints = [ # Runners { 'path': '/v1/runnertypes',", "'/v1/webhooks', 'method': 'GET' }, { 'path': '/v1/webhooks/git', 'method': 'GET' },", "}, # Service registry { 'path': '/v1/service_registry/groups', 'method': 'GET', 'is_getall':", "'is_getall': True }, { 'path': '/v1/packs/dummy_pack_1', 'method': 'GET' }, #", "{ 'path': '/v1/rules', 'method': 'GET', 'is_getall': True }, { 'path':", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET',", "'is_getall': True }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'GET' }, {", "'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'DELETE' }, # Rule enforcements", "= coordination.get_coordinator(use_cache=False) # Register mock service in the service registry", "'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % 
(rule_model.ref), 'method': 'DELETE'", "'path': '/v1/actions/views/overview', 'method': 'GET', 'is_getall': True }, # Rule views", "['actions']} }, { 'path': '/v1/packs/index/search', 'method': 'POST', 'payload': {'query': 'cloud'}", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "six.moves.http_client __all__ = [ 'APIControllersRBACTestCase' ] FIXTURES_PACK = 'generic' TEST_FIXTURES", "'/v1/executions/%s/output' % (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions', 'method':", "{ 'path': '/v1/actionalias/match', 'method': 'POST', 'payload': {'command': 'test command string'}", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "use this file except in compliance with # the License.", "supported_endpoints: if not endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] +", "# Sensors { 'path': '/v1/sensortypes', 'method': 'GET', 'is_getall': True },", "should return forbidden response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def", "}, # Action aliases { 'path': '/v1/actionalias', 'method': 'GET', 'is_getall':", "'action': { 'ref': 'sixpack.st2.test.action', 'parameters': { 'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}'", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/packs/dummy_pack_1', 'method': 'GET'", "'is_getall': True }, { 'path': '/v1/rbac/permission_types/action', 'method': 'GET' }, #", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "collections import OrderedDict import six import mock from st2common.services import", "used to test get one, edit and delete operations self.models", "'method': 'PUT', 'payload': {'enabled': False} }, # Actions { 
'path':", "'alias1', 'format': 'foo bar ponies', 'command': 'foo bar ponies', 'user':", "'method': 'GET' }, # Timers { 'path': '/v1/timers', 'method': 'GET'", "to in writing, software # distributed under the License is", "trace_model = self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints = [ #", "True }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'GET' }, { 'path':", "'path': '/v1/traces', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/traces/%s'", "'type': 'wolfpack.triggertype-1' }, 'criteria': { 'trigger.k1': { 'pattern': 't1_p_v', 'type':", "# See the License for the specific language governing permissions", "endpoint['method'] == 'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] ==", "'method': 'GET' }, { 'path': '/v1/webhooks/git', 'method': 'GET' }, #", "self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml'] enforcement_model = self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml']", "access to icon.png file doesn't require any permissions response =", "st2common.persistence.rbac import UserRoleAssignment from st2common.models.db.rbac import UserRoleAssignmentDB from st2common.service_setup import", "}, { 'path': '/v1/rbac/roles/admin', 'method': 'GET' }, # RBAC -", "additional information regarding copyright ownership. 
# The ASF licenses this", "'parameters': { 'c': {'type': 'string', 'default': 'C1', 'position': 0}, 'd':", "or agreed to in writing, software # distributed under the", "(enforcement_model.id), 'method': 'GET' }, # Action Executions { 'path': '/v1/executions',", "'path': '/v1/webhooks', 'method': 'GET' }, { 'path': '/v1/webhooks/git', 'method': 'GET'", "required by applicable law or agreed to in writing, software", "= six.moves.http_client __all__ = [ 'APIControllersRBACTestCase' ] FIXTURES_PACK = 'generic'", "'is_getall': True }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'GET'", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "= self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml'] enforcement_model = self.models['enforcements']['enforcement1.yaml'] execution_model =", "'/v1/packs/uninstall', 'method': 'POST', 'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/register',", "copyright ownership. # The ASF licenses this file to You", "operations self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB( user='user', role='role',", "mock from st2common.services import triggers as trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db',", "'GET', 'is_getall': True }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'GET' },", "= self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s \"%s\" didn\\'t return 403 status", "'/v1/packs/index/health', 'method': 'GET' }, # Pack views { 'path': '/v1/packs/views/files/dummy_pack_1',", "import mock from st2common.services import triggers as trigger_service with mock.patch.object(trigger_service,", "'/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload': { 'foo': 'bar' } }, #", "= { 'name': 'test-runner-1', 'description': 'test', 'enabled': False } MOCK_ACTION_1", "st2common.services import coordination from st2tests import 
config as tests_config from", "'/v1/rules/%s' % (rule_model.ref), 'method': 'DELETE' }, # Rule enforcements {", "False } MOCK_ACTION_1 = { 'name': 'ma.dummy.action', 'pack': 'examples', 'description':", "'is_getall': True }, { 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'GET'", "'thing' } } }, # Traces { 'path': '/v1/traces', 'method':", "'/v1/config_schemas', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/config_schemas/dummy_pack_1', 'method':", "True }, { 'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET' }, { 'path':", "{ 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'DELETE' # stop execution", "Traces { 'path': '/v1/traces', 'method': 'GET', 'is_getall': True }, {", "for endpoint in supported_endpoints: response = self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s", "controllers { 'path': '/v1/executions/%s/attribute/trigger_instance' % (execution_model.id), 'method': 'GET' }, {", "OrderedDict import six import mock from st2common.services import triggers as", "% (execution_model.id), 'method': 'DELETE' # stop execution }, { 'path':", "'is_getall': True }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'GET' }, {", "'/v1/executions/%s/re_run' % (execution_model.id), 'method': 'POST', # re-run execution 'payload': {'parameters':", "License, Version 2.0 # (the \"License\"); you may not use", "'DELETE' # stop execution }, { 'path': '/v1/executions/%s/re_run' % (execution_model.id),", "'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall': True } ] self.use_user(self.users['no_permissions']) for", "may not use this file except in compliance with #", "{ 'path': '/v1/rbac/roles', 'method': 'GET', 'is_getall': True }, { 'path':", "{ 'ref': 'sixpack.st2.test.action', 'parameters': { 'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' }", "'path': '/v1/rbac/roles', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/roles/admin',", "test ?limit=-1 - admin user 
self.use_user(self.users['admin']) for endpoint in supported_endpoints:", "agreed to in writing, software # distributed under the License", "# Webhook { 'path': '/v1/webhooks/st2', 'method': 'POST', 'payload': { 'trigger':", "objects - those objects are used to test get one,", "self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test ?limit=-1 - admin user", "distributed under the License is distributed on an \"AS IS\"", "user self.use_user(self.users['admin']) for endpoint in supported_endpoints: if not endpoint.get('is_getall', False):", "Webhook { 'path': '/v1/webhooks/st2', 'method': 'POST', 'payload': { 'trigger': 'some',", "{'enabled': False} }, # Actions { 'path': '/v1/actions', 'method': 'GET',", "}, { 'path': '/v1/executions/%s/output' % (execution_model.id), 'method': 'GET' }, {", "{ 'trigger': 'some', 'payload': { 'some': 'thing' } } },", "'GET', 'is_getall': True }, { 'path': '/v1/rbac/permission_types/action', 'method': 'GET' },", "('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions',", "'/v1/service_registry/groups', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/service_registry/groups/mock_service/members', 'method':", "'GET' }, # Alias executions { 'path': '/v1/aliasexecution', 'method': 'POST',", "'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters': { 'c': {'type': 'string', 'default':", "roles { 'path': '/v1/rbac/roles', 'method': 'GET', 'is_getall': True }, {", "under the Apache License, Version 2.0 # (the \"License\"); you", "# the License. You may obtain a copy of the", "express or implied. # See the License for the specific", "'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger': { 'type': 'wolfpack.triggertype-1' }, 'criteria': {", "this work for additional information regarding copyright ownership. 
# The", "with a user which has no permissions and makes sure", "'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'PUT', 'payload': {'enabled': False} },", "{ 'path': '/v1/actionalias', 'method': 'GET', 'is_getall': True }, { 'path':", "self.app.delete(endpoint['path'], expect_errors=True) else: raise ValueError('Unsupported method: %s' % (endpoint['method'])) return", "'method': 'GET' }, # RBAC - roles { 'path': '/v1/rbac/roles',", "'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1", "= self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) # Other files should return forbidden", "['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements',", "__all__ = [ 'APIControllersRBACTestCase' ] FIXTURES_PACK = 'generic' TEST_FIXTURES =", "return 403 status code (body=%s)' % (endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code,", "'/v1/packs/register', 'method': 'POST', 'payload': {'types': ['actions']} }, { 'path': '/v1/packs/index/search',", "Pack management { 'path': '/v1/packs/install', 'method': 'POST', 'payload': {'packs': 'libcloud'}", "writing, software # distributed under the License is distributed on", "'path': '/v1/traces/%s' % (trace_model.id), 'method': 'GET' }, # Timers {", "}, # RBAC - user role assignments { 'path': '/v1/rbac/role_assignments',", "{ 'pattern': 't1_p_v', 'type': 'equals' } }, 'action': { 'ref':", "doesn't require any permissions response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) #", "'foo bar ponies', 'user': 'channel', 'source_channel': 'bar'} }, # Webhook", "trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db', 
mock.MagicMock()): from st2api.controllers.v1.webhooks import HooksHolder from", "you may not use this file except in compliance with", "'/v1/actionalias/aliases.alias1', 'method': 'DELETE' }, { 'path': '/v1/actionalias/match', 'method': 'POST', 'payload':", "}, { 'path': '/v1/rbac/permission_types/action', 'method': 'GET' }, # Action views", "}, { 'path': '/v1/packs/uninstall', 'method': 'POST', 'payload': {'packs': 'libcloud'} },", "'method': 'PUT', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref),", "the License. You may obtain a copy of the License", "}, { 'path': '/v1/actions', 'method': 'POST', 'payload': MOCK_ACTION_1 }, {", "'criteria': { 'trigger.k1': { 'pattern': 't1_p_v', 'type': 'equals' } },", "service in the service registry for testing purposes service =", "Rule views { 'path': '/v1/rules/views', 'method': 'GET', 'is_getall': True },", "'/v1/actionalias', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/actionalias/aliases.alias1', 'method':", "'path': '/v1/packs/index/health', 'method': 'GET' }, # Pack views { 'path':", "'%s \"%s\" didn\\'t return 403 status code (body=%s)' % (endpoint['method'],", "Action views { 'path': '/v1/actions/views/overview', 'method': 'GET', 'is_getall': True },", "}, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload': MOCK_ACTION_1 }, {", "# Packs { 'path': '/v1/packs', 'method': 'GET', 'is_getall': True },", "Service registry { 'path': '/v1/service_registry/groups', 'method': 'GET', 'is_getall': True },", "def test_api_endpoints_behind_rbac_wall(self): # alias_model = self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml'] rule_model", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "Version 2.0 # (the \"License\"); you may not use this", "'method': 'POST', 'payload': {'types': ['actions']} }, { 'path': '/v1/packs/index/search', 'method':", "from collections import OrderedDict import six import mock from st2common.services", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'GET'", "register_service_in_service_registry from st2common.services import coordination from st2tests import config as", "{ 'path': '/v1/executions/%s/attribute/trigger_instance' % (execution_model.id), 'method': 'GET' }, { 'path':", "tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client = six.moves.http_client __all__ = [ 'APIControllersRBACTestCase'", "True }, { 'path': '/v1/ruleenforcements/%s' % (enforcement_model.id), 'method': 'GET' },", "'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method': 'GET' }, # RBAC -", "}, # Packs { 'path': '/v1/packs', 'method': 'GET', 'is_getall': True", "'PUT', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method':", "'name': 'test-runner-1', 'description': 'test', 'enabled': False } MOCK_ACTION_1 = {", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "FIXTURES_PACK = 'generic' TEST_FIXTURES = OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors',", "\"\"\" register_packs = True fixtures_loader = FixturesLoader() coordinator = None", "'path': '/v1/packs/uninstall', 'method': 'POST', 'payload': {'packs': 'libcloud'} }, { 'path':", "'/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'GET' }, { 'path': '/v1/sensortypes/%s' %", "source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): # alias_model", "'path': '/v1/config_schemas', 'method': 'GET', 'is_getall': True }, { 'path': 
'/v1/config_schemas/dummy_pack_1',", "= self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method'] == 'POST': return self.app.post_json(endpoint['path'], endpoint['payload'],", "no permissions and makes sure API returns access denied. \"\"\"", "to test get one, edit and delete operations self.models =", "'/v1/rules/%s' % (rule_model.ref), 'method': 'PUT', 'payload': MOCK_RULE_1 }, { 'path':", "% (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions', 'method': 'POST',", "'GET', 'is_getall': True }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method':", "- non-admin user self.use_user(self.users['observer']) for endpoint in supported_endpoints: if not", "'path': '/v1/executions/%s' % (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/output'", "edit and delete operations self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model =", "didn\\'t return 403 status code (body=%s)' % (endpoint['method'], endpoint['path'], response.body)", "{ 'path': '/v1/actions', 'method': 'GET', 'is_getall': True }, { 'path':", "Pack views { 'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET' }, # Pack", "'test command string'} }, # Rules { 'path': '/v1/rules', 'method':", "'method': 'GET' }, # Alias executions { 'path': '/v1/aliasexecution', 'method':", "UserRoleAssignment from st2common.models.db.rbac import UserRoleAssignmentDB from st2common.service_setup import register_service_in_service_registry from", "description', 'action_ref': 'core.local', 'formats': ['a', 'b'] } MOCK_RULE_1 = {", "msg = '%s \"%s\" didn\\'t return 403 status code (body=%s)'", "response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) # Other files should return", "(execution_model.id), 'method': 'GET' }, # Alias executions { 'path': '/v1/aliasexecution',", "import FixturesLoader from open_rbac.tests 
import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT", "'method': 'GET' }, { 'path': '/v1/timers/%s' % (timer_model.id), 'method': 'GET'", "}, { 'path': '/v1/timers/%s' % (timer_model.id), 'method': 'GET' }, #", "'action_ref': 'core.local', 'formats': ['a', 'b'] } MOCK_RULE_1 = { 'enabled':", "self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml')", "# Rules { 'path': '/v1/rules', 'method': 'GET', 'is_getall': True },", "MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload': MOCK_ACTION_1 },", "}, # RBAC - permission types { 'path': '/v1/rbac/permission_types', 'method':", "self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint): if endpoint['method'] == 'GET': response", "schedule execution / run action }, { 'path': '/v1/executions/%s' %", "('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml'])", "'/v1/executions', 'method': 'POST', 'payload': {'action': 'core.local'} # schedule execution /", "'/v1/webhooks/st2', 'method': 'POST', 'payload': { 'trigger': 'some', 'payload': { 'some':", "expect_errors=True) msg = '%s \"%s\" didn\\'t return 403 status code", "supported_endpoints = [ # Runners { 'path': '/v1/runnertypes', 'method': 'GET',", "'GET' }, # Pack management { 'path': '/v1/packs/install', 'method': 'POST',", "/ run action }, { 'path': '/v1/executions/%s' % (execution_model.id), 'method':", "'path': '/v1/runnertypes', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/runnertypes/test-runner-1',", "re-run execution 'payload': {'parameters': {}} }, # Action execution nested", "'?limit=-1') 
self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test that access", "'GET' }, { 'path': '/v1/actionalias', 'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1 },", "'default': 'D1', 'immutable': True} } } MOCK_ACTION_ALIAS_1 = { 'name':", "'/v1/actions/wolfpack.action-1', 'method': 'GET' }, { 'path': '/v1/actions', 'method': 'POST', 'payload':", "'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload': MOCK_ACTION_1", "'payload': {'parameters': {}} }, # Action execution nested controllers {", "'/v1/rules/views', 'method': 'GET', 'is_getall': True }, # Service registry {", "False): continue response = self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code, http_client.OK) def", "MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'PUT', 'payload':", "endpoint in supported_endpoints: response = self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s \"%s\"", "continue response = self.app.get(endpoint['path'] + '?limit=-1', expect_errors=True) msg = '%s", "}, # Pack configs { 'path': '/v1/configs', 'method': 'GET', 'is_getall':", "# Action Executions { 'path': '/v1/executions', 'method': 'GET', 'is_getall': True", "'method': 'DELETE' }, # Rule enforcements { 'path': '/v1/ruleenforcements', 'method':", "expect_errors=True) elif endpoint['method'] == 'POST': return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif", "Inc ('StackStorm') under one or more # contributor license agreements.", "'path': '/v1/executions', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/executions/%s'", "(the \"License\"); you may not use this file except in", "# Pack configs { 'path': '/v1/configs', 'method': 'GET', 'is_getall': True", "'/v1/config_schemas/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' },", "OR 
CONDITIONS OF ANY KIND, either express or implied. #", "testing purposes service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name': 'mock_service'},", "http_client = six.moves.http_client __all__ = [ 'APIControllersRBACTestCase' ] FIXTURES_PACK =", "'POST', 'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/register', 'method': 'POST',", "_perform_request_for_endpoint(self, endpoint): if endpoint['method'] == 'GET': response = self.app.get(endpoint['path'], expect_errors=True)", "has no permissions and makes sure API returns access denied.", "the License is distributed on an \"AS IS\" BASIS, #", "'/v1/packs/install', 'method': 'POST', 'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/uninstall',", "'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT',", "schemas { 'path': '/v1/config_schemas', 'method': 'GET', 'is_getall': True }, {", "% (self.role_assignment_db_model['id']), 'method': 'GET' }, # RBAC - permission types", "+ '?limit=-1') self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test that", "response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test ?limit=-1 - admin", "<reponame>cognifloyd/st2-open-rbac<gh_stars>0 # Licensed to the StackStorm, Inc ('StackStorm') under one", "delete operations self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB( user='user',", "expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint): if endpoint['method'] == 'GET':", "'enabled': True, 'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger': { 'type': 'wolfpack.triggertype-1'", 
"# Pack config schemas { 'path': '/v1/config_schemas', 'method': 'GET', 'is_getall':", "} MOCK_ACTION_ALIAS_1 = { 'name': 'alias3', 'pack': 'aliases', 'description': 'test", "{ 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'GET' }, { 'path':", "'path': '/v1/packs/index/search', 'method': 'POST', 'payload': {'query': 'cloud'} }, { 'path':", "['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1 = { 'name': 'test-runner-1', 'description': 'test', 'enabled':", "'path': '/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload': { 'foo': 'bar' } },", "# Runners { 'path': '/v1/runnertypes', 'method': 'GET', 'is_getall': True },", "{ 'path': '/v1/rules', 'method': 'POST', 'payload': MOCK_RULE_1 }, { 'path':", "{ 'path': '/v1/executions/%s/re_run' % (execution_model.id), 'method': 'POST', # re-run execution", "'POST', # re-run execution 'payload': {'parameters': {}} }, # Action", "timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints = [ # Runners { 'path':", "self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'DELETE': return self.app.delete(endpoint['path'], expect_errors=True)", "'PUT', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE' },", "stop execution }, { 'path': '/v1/executions/%s/re_run' % (execution_model.id), 'method': 'POST',", "{ 'path': '/v1/ruleenforcements/%s' % (enforcement_model.id), 'method': 'GET' }, # Action", "MOCK_RULE_1 = { 'enabled': True, 'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger':", "'enabled': True, 'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters': { 'c': {'type':", "% (trace_model.id), 'method': 'GET' }, # Timers { 'path': '/v1/timers',", "'is_getall': True }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'GET' }, {", "'enabled': False } MOCK_ACTION_1 = { 'name': 'ma.dummy.action', 'pack': 'examples',", "# RBAC - user role assignments { 'path': 
'/v1/rbac/role_assignments', 'method':", "}, 'description': '' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class which", "'runner_type': 'local-shell-script', 'parameters': { 'c': {'type': 'string', 'default': 'C1', 'position':", "(execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/output' % (execution_model.id), 'method':", "limitations under the License. from collections import OrderedDict import six", "]) MOCK_RUNNER_1 = { 'name': 'test-runner-1', 'description': 'test', 'enabled': False", "'path': '/v1/runnertypes/test-runner-1', 'method': 'GET' }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT',", "more # contributor license agreements. See the NOTICE file distributed", "{ 'path': '/v1/packs', 'method': 'GET', 'is_getall': True }, { 'path':", "'POST', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload':", "setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False) # Register mock", "# Action views { 'path': '/v1/actions/views/overview', 'method': 'GET', 'is_getall': True", "= None @classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator =", "} MOCK_ACTION_1 = { 'name': 'ma.dummy.action', 'pack': 'examples', 'description': 'test", "'method': 'DELETE' }, { 'path': '/v1/actionalias/match', 'method': 'POST', 'payload': {'command':", "'GET' }, { 'path': '/v1/rules', 'method': 'POST', 'payload': MOCK_RULE_1 },", "# Register mock service in the service registry for testing", "law or agreed to in writing, software # distributed under", "'description': 'test description', 'enabled': True, 'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters':", "'/v1/configs/dummy_pack_1', 'method': 'GET' }, { 'path': 
'/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload':", "'path': '/v1/sensortypes', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/sensortypes/%s'", "# Action aliases { 'path': '/v1/actionalias', 'method': 'GET', 'is_getall': True", "# Action execution nested controllers { 'path': '/v1/executions/%s/attribute/trigger_instance' % (execution_model.id),", "for endpoint in supported_endpoints: if not endpoint.get('is_getall', False): continue response", "http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint): if endpoint['method'] == 'GET': response =", "'GET' }, # Pack configs { 'path': '/v1/configs', 'method': 'GET',", "}, { 'path': '/v1/traces/%s' % (trace_model.id), 'method': 'GET' }, #", "'/v1/rbac/roles', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/roles/admin', 'method':", "True }, # Service registry { 'path': '/v1/service_registry/groups', 'method': 'GET',", "'GET', 'is_getall': True }, { 'path': '/v1/packs/dummy_pack_1', 'method': 'GET' },", "True }, { 'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall': True }", "if not endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] + '?limit=-1',", "}, { 'path': '/v1/webhooks/git', 'method': 'GET' }, # RBAC -", "= '%s \"%s\" didn\\'t return 403 status code (body=%s)' %", "'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload': MOCK_RUNNER_1 }, # Packs {", "'sixpack.st2.test.action', 'parameters': { 'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' } }, 'description':", "string'} }, # Rules { 'path': '/v1/rules', 'method': 'GET', 'is_getall':", "True } ] self.use_user(self.users['no_permissions']) for endpoint in supported_endpoints: response =", "Rules { 'path': '/v1/rules', 'method': 'GET', 'is_getall': True }, {", "'POST', 'payload': { 'trigger': 'some', 'payload': { 'some': 'thing' }", "'/v1/packs/index/search', 'method': 'POST', 'payload': {'query': 'cloud'} }, { 'path': '/v1/packs/index/health',", 
"}, { 'path': '/v1/packs/register', 'method': 'POST', 'payload': {'types': ['actions']} },", "'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE' }, #", "'GET' }, { 'path': '/v1/executions/%s/output' % (execution_model.id), 'method': 'GET' },", "nested controllers { 'path': '/v1/executions/%s/attribute/trigger_instance' % (execution_model.id), 'method': 'GET' },", "'test', 'enabled': False } MOCK_ACTION_1 = { 'name': 'ma.dummy.action', 'pack':", "{ 'path': '/v1/rbac/role_assignments', 'method': 'GET', 'is_getall': True }, { 'path':", "# Also test ?limit=-1 - non-admin user self.use_user(self.users['observer']) for endpoint", "return forbidden response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self,", "the NOTICE file distributed with # this work for additional", "} }, 'description': '' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class", "}, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' }, # Pack configs", "{ 'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE' }, # Action aliases {", "'/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method':", "{ 'path': '/v1/traces/%s' % (trace_model.id), 'method': 'GET' }, # Timers", "'path': '/v1/timers', 'method': 'GET' }, { 'path': '/v1/timers/%s' % (timer_model.id),", "may obtain a copy of the License at # #", "the Apache License, Version 2.0 # (the \"License\"); you may", "return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): # alias_model = self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml']", "any permissions response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) # Other 
files", "FixturesLoader() coordinator = None @classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass()", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'GET'", "fixtures_loader = FixturesLoader() coordinator = None @classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True)", "('StackStorm') under one or more # contributor license agreements. See", "\"%s\" didn\\'t return 403 status code (body=%s)' % (endpoint['method'], endpoint['path'],", "http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test that access to icon.png", "enforcement_model = self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml'] trace_model = self.models['traces']['trace_for_test_enforce.yaml'] timer_model", "'path': '/v1/webhooks/git', 'method': 'GET' }, # RBAC - roles {", "and # limitations under the License. from collections import OrderedDict", "}, # Alias executions { 'path': '/v1/aliasexecution', 'method': 'POST', 'payload':", "# (the \"License\"); you may not use this file except", "user which has no permissions and makes sure API returns", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "'/v1/rbac/roles/admin', 'method': 'GET' }, # RBAC - user role assignments", "Register mock service in the service registry for testing purposes", "{ 'path': '/v1/runnertypes/test-runner-1', 'method': 'GET' }, { 'path': '/v1/runnertypes/test-runner-1', 'method':", "{'types': ['actions']} }, { 'path': '/v1/packs/index/search', 'method': 'POST', 'payload': {'query':", "'core.local', 'formats': ['a', 'b'] } MOCK_RULE_1 = { 'enabled': True,", "% (execution_model.id), 'method': 'GET' }, # Alias executions { 'path':", "License. 
from collections import OrderedDict import six import mock from", "'string', 'default': 'C1', 'position': 0}, 'd': {'type': 'string', 'default': 'D1',", "('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']),", "'pattern': 't1_p_v', 'type': 'equals' } }, 'action': { 'ref': 'sixpack.st2.test.action',", "'GET' }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload': { 'foo':", "'is_getall': True }, { 'path': '/v1/traces/%s' % (trace_model.id), 'method': 'GET'", "in compliance with # the License. You may obtain a", "execution }, { 'path': '/v1/executions/%s/re_run' % (execution_model.id), 'method': 'POST', #", "executions { 'path': '/v1/aliasexecution', 'method': 'POST', 'payload': {'name': 'alias1', 'format':", "}, { 'path': '/v1/ruleenforcements/%s' % (enforcement_model.id), 'method': 'GET' }, #", "{ 'path': '/v1/packs/uninstall', 'method': 'POST', 'payload': {'packs': 'libcloud'} }, {", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "'/v1/ruleenforcements', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/ruleenforcements/%s' %", "'D1', 'immutable': True} } } MOCK_ACTION_ALIAS_1 = { 'name': 'alias3',", "'/v1/actionalias/aliases.alias1', 'method': 'GET' }, { 'path': '/v1/actionalias', 'method': 'POST', 'payload':", "'is_getall': True }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'GET'", "APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class which hits all the API endpoints", "'method': 'DELETE' # stop execution }, { 'path': '/v1/executions/%s/re_run' %", "six import mock from st2common.services import triggers as trigger_service with", "'path': '/v1/packs/register', 'method': 'POST', 'payload': {'types': ['actions']} }, { 'path':", "icon.png file doesn't require any permissions response = 
self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code,", "are behind the RBAC wall with a user which has", "those objects are used to test get one, edit and", "views { 'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET' }, # Pack config", "test_api_endpoints_behind_rbac_wall(self): # alias_model = self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml'] rule_model =", "bar ponies', 'user': 'channel', 'source_channel': 'bar'} }, # Webhook {", "True, 'name': 'st2.test.rule2', 'pack': 'yoyohoneysingh', 'trigger': { 'type': 'wolfpack.triggertype-1' },", "license agreements. See the NOTICE file distributed with # this", "import coordination from st2tests import config as tests_config from st2tests.fixturesloader", "True }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'GET' },", "'method': 'GET' }, # Action views { 'path': '/v1/actions/views/overview', 'method':", "user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self):", "API endpoints which are behind the RBAC wall with a", "'/v1/packs', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/packs/dummy_pack_1', 'method':", "(endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test ?limit=-1", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "run action }, { 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'DELETE'", "registry for testing purposes service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1',", "{ 'path': '/v1/packs/index/search', 'method': 'POST', 'payload': 
{'query': 'cloud'} }, {", "'GET', 'is_getall': True }, { 'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET' },", "# Webhooks { 'path': '/v1/webhooks', 'method': 'GET' }, { 'path':", "MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1 },", "capabilities={'key1': 'value1', 'name': 'mock_service'}, start_heart=True) @classmethod def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass()", "contributor license agreements. See the NOTICE file distributed with #", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "'/v1/rbac/permission_types/action', 'method': 'GET' }, # Action views { 'path': '/v1/actions/views/overview',", "endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] + '?limit=-1', expect_errors=True) msg", "to icon.png file doesn't require any permissions response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png')", "Also test ?limit=-1 - non-admin user self.use_user(self.users['observer']) for endpoint in", "'libcloud'} }, { 'path': '/v1/packs/register', 'method': 'POST', 'payload': {'types': ['actions']}", "execution_model = self.models['executions']['execution1.yaml'] trace_model = self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints", "self).setUp() # Register packs if self.register_packs: self._register_packs() # Insert mock", "status code (body=%s)' % (endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg)", "} }, # Traces { 'path': '/v1/traces', 'method': 'GET', 'is_getall':", "'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/register', 'method': 'POST', 'payload':", "'POST', 'payload': {'query': 'cloud'} }, { 'path': '/v1/packs/index/health', 'method': 'GET'", "}, # Rule views { 'path': '/v1/rules/views', 'method': 'GET', 'is_getall':", "= self.app.get(endpoint['path'] 
+ '?limit=-1', expect_errors=True) msg = '%s \"%s\" didn\\'t", "'/v1/runnertypes', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/runnertypes/test-runner-1', 'method':", "wall with a user which has no permissions and makes", "}, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'GET' }, { 'path': '/v1/actions',", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml',", "'method': 'PUT', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE'", "'mock_service'}, start_heart=True) @classmethod def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self):", "'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1',", "'path': '/v1/rbac/roles/admin', 'method': 'GET' }, # RBAC - user role", "'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'PUT',", "'/v1/aliasexecution', 'method': 'POST', 'payload': {'name': 'alias1', 'format': 'foo bar ponies',", "enforcements { 'path': '/v1/ruleenforcements', 'method': 'GET', 'is_getall': True }, {", "triggers as trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()): from st2api.controllers.v1.webhooks import", "'payload': {'action': 'core.local'} # schedule execution / run action },", "file except in compliance with # the License. 
You may", "{'packs': 'libcloud'} }, { 'path': '/v1/packs/register', 'method': 'POST', 'payload': {'types':", "'payload': { 'some': 'thing' } } }, # Traces {", "from st2common.services import triggers as trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()):", "# Insert mock objects - those objects are used to", "% (rule_model.ref), 'method': 'DELETE' }, # Rule enforcements { 'path':", "execution 'payload': {'parameters': {}} }, # Action execution nested controllers", "}, # Pack management { 'path': '/v1/packs/install', 'method': 'POST', 'payload':", "'method': 'POST', 'payload': {'action': 'core.local'} # schedule execution / run", "this file except in compliance with # the License. You", "def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False) # Register", "# RBAC - permission types { 'path': '/v1/rbac/permission_types', 'method': 'GET',", "'trigger': { 'type': 'wolfpack.triggertype-1' }, 'criteria': { 'trigger.k1': { 'pattern':", "'method': 'GET' }, { 'path': '/v1/actionalias', 'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1", "mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): # alias_model = self.models['aliases']['alias1.yaml'] sensor_model =", "'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' }, # Pack configs { 'path':", "'APIControllersRBACTestCase' ] FIXTURES_PACK = 'generic' TEST_FIXTURES = OrderedDict([ ('runners', ['testrunner1.yaml',", "views { 'path': '/v1/rules/views', 'method': 'GET', 'is_getall': True }, #", "True} } } MOCK_ACTION_ALIAS_1 = { 'name': 'alias3', 'pack': 'aliases',", "in the service registry for testing purposes service = six.binary_type(six.text_type('mock_service').encode('ascii'))", "['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1 = { 'name': 'test-runner-1', 
'description':", "'GET' }, # Timers { 'path': '/v1/timers', 'method': 'GET' },", "'GET', 'is_getall': True }, # Rule views { 'path': '/v1/rules/views',", "('actions', ['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']),", "'some', 'payload': { 'some': 'thing' } } }, # Traces", "{ 'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload': MOCK_RUNNER_1 }, # Packs", "= self.models['executions']['execution1.yaml'] trace_model = self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints =", "Apache License, Version 2.0 # (the \"License\"); you may not", "ponies', 'command': 'foo bar ponies', 'user': 'channel', 'source_channel': 'bar'} },", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "{ 'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' } }, 'description': '' }", "('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1 = { 'name': 'test-runner-1',", "as trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()): from st2api.controllers.v1.webhooks import HooksHolder", "}, { 'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall': True } ]", "from st2common.persistence.rbac import UserRoleAssignment from st2common.models.db.rbac import UserRoleAssignmentDB from st2common.service_setup", "st2common.service_setup import register_service_in_service_registry from st2common.services import coordination from st2tests import", "'test description', 'enabled': True, 'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters': {", "import register_service_in_service_registry from st2common.services import coordination from st2tests import config", "}, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1 
}, {", "(rule_model.ref), 'method': 'DELETE' }, # Rule enforcements { 'path': '/v1/ruleenforcements',", "register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name': 'mock_service'}, start_heart=True) @classmethod def tearDownClass(cls): super(APIControllersRBACTestCase,", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/roles/admin', 'method': 'GET'", "to the StackStorm, Inc ('StackStorm') under one or more #", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET'", "'is_getall': True }, { 'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET' }, {", "'path': '/v1/rbac/permission_types/action', 'method': 'GET' }, # Action views { 'path':", "'is_getall': True }, { 'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method': 'GET'", "== 'POST': return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'PUT':", "['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ])", "self.app.get(endpoint['path'] + '?limit=-1', expect_errors=True) msg = '%s \"%s\" didn\\'t return", "or implied. 
# See the License for the specific language", "st2api.controllers.v1.webhooks import HooksHolder from st2common.persistence.rbac import UserRoleAssignment from st2common.models.db.rbac import", "rule_model = self.models['rules']['rule1.yaml'] enforcement_model = self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml'] trace_model", "configs { 'path': '/v1/configs', 'method': 'GET', 'is_getall': True }, {", "}, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'GET' }, {", "if self.register_packs: self._register_packs() # Insert mock objects - those objects", "'path': '/v1/packs/install', 'method': 'POST', 'payload': {'packs': 'libcloud'} }, { 'path':", "} class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class which hits all the", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "'b'] } MOCK_RULE_1 = { 'enabled': True, 'name': 'st2.test.rule2', 'pack':", "'GET' }, # Webhooks { 'path': '/v1/webhooks', 'method': 'GET' },", "== 'GET': response = self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method'] == 'POST':", "}, # Action views { 'path': '/v1/actions/views/overview', 'method': 'GET', 'is_getall':", "{'command': 'test command string'} }, # Rules { 'path': '/v1/rules',", "'alias3', 'pack': 'aliases', 'description': 'test description', 'action_ref': 'core.local', 'formats': ['a',", "}, # Action execution nested controllers { 'path': '/v1/executions/%s/attribute/trigger_instance' %", "\"License\"); you may not use this file except in compliance", "except in compliance with # the License. 
You may obtain", "'method': 'POST', 'payload': { 'trigger': 'some', 'payload': { 'some': 'thing'", "True }, # Rule views { 'path': '/v1/rules/views', 'method': 'GET',", "'method': 'GET' }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload': MOCK_RUNNER_1", "self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder,", "self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test ?limit=-1 - non-admin user", "'/v1/configs', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/configs/dummy_pack_1', 'method':", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "packs if self.register_packs: self._register_packs() # Insert mock objects - those", "True }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'GET' },", "require any permissions response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) # Other", "}, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'DELETE' }, #", "file to You under the Apache License, Version 2.0 #", "'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE' }, { 'path': '/v1/actionalias/match', 'method': 'POST',", "import HooksHolder from st2common.persistence.rbac import UserRoleAssignment from st2common.models.db.rbac import UserRoleAssignmentDB", "'format': 'foo bar ponies', 'command': 'foo bar ponies', 'user': 'channel',", "{ 'name': 'ma.dummy.action', 'pack': 'examples', 'description': 'test description', 'enabled': True,", "'name': 'alias3', 'pack': 'aliases', 'description': 'test description', 'action_ref': 'core.local', 'formats':", "'path': '/v1/actionalias', 'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': 
'/v1/actionalias/aliases.alias1',", "'DELETE' }, { 'path': '/v1/actionalias/match', 'method': 'POST', 'payload': {'command': 'test", "'path': '/v1/actions/wolfpack.action-1', 'method': 'GET' }, { 'path': '/v1/actions', 'method': 'POST',", "response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test ?limit=-1 - non-admin", "{ 'path': '/v1/webhooks', 'method': 'GET' }, { 'path': '/v1/webhooks/git', 'method':", "'pack': 'yoyohoneysingh', 'trigger': { 'type': 'wolfpack.triggertype-1' }, 'criteria': { 'trigger.k1':", "which hits all the API endpoints which are behind the", "- user role assignments { 'path': '/v1/rbac/role_assignments', 'method': 'GET', 'is_getall':", "permission types { 'path': '/v1/rbac/permission_types', 'method': 'GET', 'is_getall': True },", "?limit=-1 - admin user self.use_user(self.users['admin']) for endpoint in supported_endpoints: if", "regarding copyright ownership. # The ASF licenses this file to", "{ 'c': {'type': 'string', 'default': 'C1', 'position': 0}, 'd': {'type':", "from st2common.service_setup import register_service_in_service_registry from st2common.services import coordination from st2tests", "service registry for testing purposes service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1':", "'method': 'POST', 'payload': {'query': 'cloud'} }, { 'path': '/v1/packs/index/health', 'method':", "under one or more # contributor license agreements. See the", "'libcloud'} }, { 'path': '/v1/packs/uninstall', 'method': 'POST', 'payload': {'packs': 'libcloud'}", "# limitations under the License. 
from collections import OrderedDict import", "which has no permissions and makes sure API returns access", "MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE' }, { 'path':", "elif endpoint['method'] == 'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method']", "}, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE' }, { 'path': '/v1/actionalias/match',", "NOTICE file distributed with # this work for additional information", "permissions and makes sure API returns access denied. \"\"\" register_packs", "{}} }, # Action execution nested controllers { 'path': '/v1/executions/%s/attribute/trigger_instance'", "['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers',", "{ 'path': '/v1/actions', 'method': 'POST', 'payload': MOCK_ACTION_1 }, { 'path':", "self.use_user(self.users['observer']) for endpoint in supported_endpoints: if not endpoint.get('is_getall', False): continue", "the RBAC wall with a user which has no permissions", "from st2common.models.db.rbac import UserRoleAssignmentDB from st2common.service_setup import register_service_in_service_registry from st2common.services", "True }, { 'path': '/v1/traces/%s' % (trace_model.id), 'method': 'GET' },", "}, { 'path': '/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload': { 'foo': 'bar'", "Alias executions { 'path': '/v1/aliasexecution', 'method': 'POST', 'payload': {'name': 'alias1',", "% (endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test", "} }, 'action': { 'ref': 'sixpack.st2.test.action', 'parameters': { 'ip2': '{{rule.k1}}',", "endpoint['method'] == 'POST': return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] ==", "'string', 'default': 'D1', 'immutable': True} } } 
MOCK_ACTION_ALIAS_1 = {", "'yoyohoneysingh', 'trigger': { 'type': 'wolfpack.triggertype-1' }, 'criteria': { 'trigger.k1': {", "test get one, edit and delete operations self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,", "'wolfpack.triggertype-1' }, 'criteria': { 'trigger.k1': { 'pattern': 't1_p_v', 'type': 'equals'", "# # Unless required by applicable law or agreed to", "UserRoleAssignmentDB from st2common.service_setup import register_service_in_service_registry from st2common.services import coordination from", "}, # Traces { 'path': '/v1/traces', 'method': 'GET', 'is_getall': True", "'method': 'GET', 'is_getall': True }, # Service registry { 'path':", "'/v1/rules', 'method': 'POST', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' %", "self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test", "'default': 'C1', 'position': 0}, 'd': {'type': 'string', 'default': 'D1', 'immutable':", "{ 'path': '/v1/timers/%s' % (timer_model.id), 'method': 'GET' }, # Webhooks", "config schemas { 'path': '/v1/config_schemas', 'method': 'GET', 'is_getall': True },", "self.use_user(self.users['admin']) for endpoint in supported_endpoints: if not endpoint.get('is_getall', False): continue", "'method': 'DELETE' }, # Action aliases { 'path': '/v1/actionalias', 'method':", "# Other files should return forbidden response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True)", "file distributed with # this work for additional information regarding", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/ruleenforcements/%s' % (enforcement_model.id),", "import triggers as trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()): from st2api.controllers.v1.webhooks", 
"continue response = self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self):", "import DUMMY_TRIGGER_DICT http_client = six.moves.http_client __all__ = [ 'APIControllersRBACTestCase' ]", "'/v1/actions', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/actions/wolfpack.action-1', 'method':", "super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False) # Register mock service in", "coordination from st2tests import config as tests_config from st2tests.fixturesloader import", "'?limit=-1', expect_errors=True) msg = '%s \"%s\" didn\\'t return 403 status", "a user which has no permissions and makes sure API", "for additional information regarding copyright ownership. # The ASF licenses", "self.models['executions']['execution1.yaml'] trace_model = self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints = [", "'GET' }, # RBAC - permission types { 'path': '/v1/rbac/permission_types',", "'method': 'GET' }, { 'path': '/v1/executions/%s/output' % (execution_model.id), 'method': 'GET'", "'/v1/executions', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/executions/%s' %", "{ 'path': '/v1/executions/%s/children' % (execution_model.id), 'method': 'GET' }, # Alias", "(execution_model.id), 'method': 'POST', # re-run execution 'payload': {'parameters': {}} },", "ASF licenses this file to You under the Apache License,", "'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1',", "'path': '/v1/rbac/permission_types', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/permission_types/action',", "}, 'action': { 'ref': 'sixpack.st2.test.action', 'parameters': { 'ip2': '{{rule.k1}}', 'ip1':", "super(APIControllersRBACTestCase, self).setUp() # Register packs if self.register_packs: 
self._register_packs() # Insert", "{ 'path': '/v1/runnertypes', 'method': 'GET', 'is_getall': True }, { 'path':", "] FIXTURES_PACK = 'generic' TEST_FIXTURES = OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']),", "{ 'path': '/v1/webhooks/st2', 'method': 'POST', 'payload': { 'trigger': 'some', 'payload':", "'local-shell-script', 'parameters': { 'c': {'type': 'string', 'default': 'C1', 'position': 0},", "def setUp(self): super(APIControllersRBACTestCase, self).setUp() # Register packs if self.register_packs: self._register_packs()", "True }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'GET' }, { 'path':", "'/v1/actions/wolfpack.action-1', 'method': 'DELETE' }, # Action aliases { 'path': '/v1/actionalias',", "implied. # See the License for the specific language governing", "coordinator = None @classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator", "'method': 'GET' }, # RBAC - permission types { 'path':", "response = self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions'])", "MOCK_ACTION_1 = { 'name': 'ma.dummy.action', 'pack': 'examples', 'description': 'test description',", "assignments { 'path': '/v1/rbac/role_assignments', 'method': 'GET', 'is_getall': True }, {", "'/v1/actions/views/overview', 'method': 'GET', 'is_getall': True }, # Rule views {", "'payload': {'query': 'cloud'} }, { 'path': '/v1/packs/index/health', 'method': 'GET' },", "(sensor_model.ref), 'method': 'GET' }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method':", "in supported_endpoints: response = self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s \"%s\" didn\\'t", "which are behind the RBAC wall with a user which", "'method': 'POST', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT',", "} 
] self.use_user(self.users['no_permissions']) for endpoint in supported_endpoints: response = self._perform_request_for_endpoint(endpoint=endpoint)", "'method': 'GET' }, # Action Executions { 'path': '/v1/executions', 'method':", "as tests_config from st2tests.fixturesloader import FixturesLoader from open_rbac.tests import APIControllerWithRBACTestCase", "@mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): # alias_model = self.models['aliases']['alias1.yaml']", "('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']),", "user self.use_user(self.users['observer']) for endpoint in supported_endpoints: if not endpoint.get('is_getall', False):", "endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'DELETE': return self.app.delete(endpoint['path'], expect_errors=True) else:", "'GET', 'is_getall': True }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'GET' },", "'path': '/v1/actions', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/actions/wolfpack.action-1',", "'/v1/rules/%s' % (rule_model.ref), 'method': 'GET' }, { 'path': '/v1/rules', 'method':", "'method': 'POST', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref),", "objects are used to test get one, edit and delete", "by applicable law or agreed to in writing, software #", "MOCK_ACTION_ALIAS_1 = { 'name': 'alias3', 'pack': 'aliases', 'description': 'test description',", "behind the RBAC wall with a user which has no", "'/v1/traces/%s' % (trace_model.id), 'method': 'GET' }, # Timers { 'path':", "not use this file except in compliance with # the", "service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name': 'mock_service'}, 
start_heart=True) @classmethod", "}, # Actions { 'path': '/v1/actions', 'method': 'GET', 'is_getall': True", "['a', 'b'] } MOCK_RULE_1 = { 'enabled': True, 'name': 'st2.test.rule2',", "# Rule views { 'path': '/v1/rules/views', 'method': 'GET', 'is_getall': True", "'path': '/v1/packs', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/packs/dummy_pack_1',", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/traces/%s' % (trace_model.id),", "{ 'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE' }, { 'path': '/v1/actionalias/match', 'method':", "}, { 'path': '/v1/rules', 'method': 'POST', 'payload': MOCK_RULE_1 }, {", "'{{trigger.t1_p}}' } }, 'description': '' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test", "# RBAC - roles { 'path': '/v1/rbac/roles', 'method': 'GET', 'is_getall':", "'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'PUT', 'payload': MOCK_RULE_1 }, {", "{ 'path': '/v1/config_schemas/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method':", "(body=%s)' % (endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also", "'path': '/v1/ruleenforcements', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/ruleenforcements/%s'", "'/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload': MOCK_RUNNER_1 }, # Packs { 'path':", "'method': 'GET' }, { 'path': '/v1/executions/%s/children' % (execution_model.id), 'method': 'GET'", "{ 'path': '/v1/actions/views/overview', 'method': 'GET', 'is_getall': True }, # Rule", "'formats': ['a', 'b'] } MOCK_RULE_1 = { 'enabled': True, 'name':", "{ 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'PUT', 'payload': {'enabled': False}", "= self.app.get(endpoint['path'] + '?limit=-1') self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) #", "from st2common.services 
import coordination from st2tests import config as tests_config", "}, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'PUT', 'payload': {'enabled':", "{ 'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path':", "'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'GET' }, { 'path': '/v1/rules',", "'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' } }, 'description': '' } class", "self.role_assignment_db_model = UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock(", "'is_getall': True }, # Service registry { 'path': '/v1/service_registry/groups', 'method':", "self.models['rules']['rule1.yaml'] enforcement_model = self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml'] trace_model = self.models['traces']['trace_for_test_enforce.yaml']", "{ 'path': '/v1/actions/wolfpack.action-1', 'method': 'GET' }, { 'path': '/v1/actions', 'method':", "{ 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'PUT', 'payload': MOCK_RULE_1 },", "'/v1/executions/%s' % (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/output' %", "the API endpoints which are behind the RBAC wall with", "'/v1/timers', 'method': 'GET' }, { 'path': '/v1/timers/%s' % (timer_model.id), 'method':", "'trigger.k1': { 'pattern': 't1_p_v', 'type': 'equals' } }, 'action': {", "'method': 'GET' }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'PUT', 'payload': {", "test ?limit=-1 - non-admin user self.use_user(self.users['observer']) for endpoint in supported_endpoints:", "open_rbac.tests import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client = six.moves.http_client", "and delete operations self.models = 
self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB(", "['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes',", "'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'GET' }, { 'path': '/v1/sensortypes/%s'", "test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test that access to icon.png file doesn't", "'/v1/executions/%s/attribute/trigger_instance' % (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/children' %", "'t1_p_v', 'type': 'equals' } }, 'action': { 'ref': 'sixpack.st2.test.action', 'parameters':", "RBAC - user role assignments { 'path': '/v1/rbac/role_assignments', 'method': 'GET',", "'POST', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method':", "endpoint['method'] == 'DELETE': return self.app.delete(endpoint['path'], expect_errors=True) else: raise ValueError('Unsupported method:", "self.register_packs: self._register_packs() # Insert mock objects - those objects are", "'GET', 'is_getall': True }, { 'path': '/v1/executions/%s' % (execution_model.id), 'method':", "{ 'path': '/v1/packs/install', 'method': 'POST', 'payload': {'packs': 'libcloud'} }, {", "'path': '/v1/executions/%s/re_run' % (execution_model.id), 'method': 'POST', # re-run execution 'payload':", "'/v1/rbac/role_assignments', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/role_assignments/%s' %", "}, { 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'DELETE' # stop", "# Pack views { 'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET' }, #", "TEST_FIXTURES = OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml',", "% (sensor_model.ref), 'method': 'PUT', 'payload': {'enabled': False} }, # Actions", 
"'GET', 'is_getall': True }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'GET' },", "'method': 'GET' }, # RBAC - user role assignments {", "('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']),", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'GET'", "}, { 'path': '/v1/actionalias', 'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1 }, {", "msg) # Also test ?limit=-1 - non-admin user self.use_user(self.users['observer']) for", "endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True)", "}, { 'path': '/v1/executions', 'method': 'POST', 'payload': {'action': 'core.local'} #", "# alias_model = self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml']", "MOCK_RUNNER_1 = { 'name': 'test-runner-1', 'description': 'test', 'enabled': False }", "} }, # Sensors { 'path': '/v1/sensortypes', 'method': 'GET', 'is_getall':", "admin user self.use_user(self.users['admin']) for endpoint in supported_endpoints: if not endpoint.get('is_getall',", "'GET', 'is_getall': True }, # Service registry { 'path': '/v1/service_registry/groups',", "'method': 'GET' }, { 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' }, #", "True }, { 'path': '/v1/rbac/permission_types/action', 'method': 'GET' }, # Action", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "mock.MagicMock()): from st2api.controllers.v1.webhooks import HooksHolder from st2common.persistence.rbac import UserRoleAssignment from", "under the License. 
from collections import OrderedDict import six import", "('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1 = {", "None @classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False)", "= six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name': 'mock_service'}, start_heart=True) @classmethod def", "Unless required by applicable law or agreed to in writing,", "}, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'GET' }, { 'path': '/v1/runnertypes/test-runner-1',", "= { 'name': 'alias3', 'pack': 'aliases', 'description': 'test description', 'action_ref':", "= [ # Runners { 'path': '/v1/runnertypes', 'method': 'GET', 'is_getall':", "'method': 'GET' }, # Pack views { 'path': '/v1/packs/views/files/dummy_pack_1', 'method':", "{'name': 'alias1', 'format': 'foo bar ponies', 'command': 'foo bar ponies',", "'payload': {'types': ['actions']} }, { 'path': '/v1/packs/index/search', 'method': 'POST', 'payload':", "FixturesLoader from open_rbac.tests import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client", "Pack configs { 'path': '/v1/configs', 'method': 'GET', 'is_getall': True },", "'POST', 'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/uninstall', 'method': 'POST',", "self.use_user(self.users['no_permissions']) # Test that access to icon.png file doesn't require", "'GET' }, # RBAC - roles { 'path': '/v1/rbac/roles', 'method':", "'DELETE' }, # Action aliases { 'path': '/v1/actionalias', 'method': 'GET',", "= [ 'APIControllersRBACTestCase' ] FIXTURES_PACK = 'generic' TEST_FIXTURES = OrderedDict([", "Action execution nested controllers { 'path': '/v1/executions/%s/attribute/trigger_instance' 
% (execution_model.id), 'method':", "% (execution_model.id), 'method': 'POST', # re-run execution 'payload': {'parameters': {}}", "Runners { 'path': '/v1/runnertypes', 'method': 'GET', 'is_getall': True }, {", "}, # Webhooks { 'path': '/v1/webhooks', 'method': 'GET' }, {", "# Alias executions { 'path': '/v1/aliasexecution', 'method': 'POST', 'payload': {'name':", "the specific language governing permissions and # limitations under the", "registry { 'path': '/v1/service_registry/groups', 'method': 'GET', 'is_getall': True }, {", "and makes sure API returns access denied. \"\"\" register_packs =", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']),", "http_client.FORBIDDEN, msg) # Also test ?limit=-1 - admin user self.use_user(self.users['admin'])", "Action Executions { 'path': '/v1/executions', 'method': 'GET', 'is_getall': True },", "'path': '/v1/packs/views/files/dummy_pack_1', 'method': 'GET' }, # Pack config schemas {", "{ 'path': '/v1/traces', 'method': 'GET', 'is_getall': True }, { 'path':", "}, { 'path': '/v1/packs/dummy_pack_1', 'method': 'GET' }, # Pack management", "applicable law or agreed to in writing, software # distributed", "True fixtures_loader = FixturesLoader() coordinator = None @classmethod def setUpClass(cls):", "'/v1/packs/dummy_pack_1', 'method': 'GET' }, # Pack management { 'path': '/v1/packs/install',", "are used to test get one, edit and delete operations", "('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']),", "{ 'path': '/v1/executions', 'method': 'GET', 'is_getall': True }, { 'path':", "'method': 'POST', 'payload': {'name': 'alias1', 'format': 'foo bar ponies', 'command':", "403 status code (body=%s)' % (endpoint['method'], endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN,", "'GET': 
response = self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method'] == 'POST': return", "'GET', 'is_getall': True }, { 'path': '/v1/rbac/roles/admin', 'method': 'GET' },", "non-admin user self.use_user(self.users['observer']) for endpoint in supported_endpoints: if not endpoint.get('is_getall',", "[ 'APIControllersRBACTestCase' ] FIXTURES_PACK = 'generic' TEST_FIXTURES = OrderedDict([ ('runners',", "cls.coordinator = coordination.get_coordinator(use_cache=False) # Register mock service in the service", "'PUT', 'payload': MOCK_RUNNER_1 }, # Packs { 'path': '/v1/packs', 'method':", "['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']),", "}, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'PUT', 'payload': MOCK_RULE_1", "(execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions', 'method': 'POST', 'payload':", "cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase, self).setUp() # Register packs if", "from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client = six.moves.http_client __all__ = [", "'/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method': 'GET' }, # RBAC - permission", "'path': '/v1/configs', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/configs/dummy_pack_1',", "# contributor license agreements. See the NOTICE file distributed with", "# this work for additional information regarding copyright ownership. 
#", "in writing, software # distributed under the License is distributed", "'description': 'test description', 'action_ref': 'core.local', 'formats': ['a', 'b'] } MOCK_RULE_1", "}, 'criteria': { 'trigger.k1': { 'pattern': 't1_p_v', 'type': 'equals' }", "% (execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/output' % (execution_model.id),", "'payload': {'name': 'alias1', 'format': 'foo bar ponies', 'command': 'foo bar", "'GET' }, # Action views { 'path': '/v1/actions/views/overview', 'method': 'GET',", "self.assertEqual(response.status_code, http_client.OK) def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test that access to", "with # the License. You may obtain a copy of", "'pack': 'aliases', 'description': 'test description', 'action_ref': 'core.local', 'formats': ['a', 'b']", "'type': 'equals' } }, 'action': { 'ref': 'sixpack.st2.test.action', 'parameters': {", "= UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT]))", "# Actions { 'path': '/v1/actions', 'method': 'GET', 'is_getall': True },", "'method': 'GET' }, # Pack config schemas { 'path': '/v1/config_schemas',", "setUp(self): super(APIControllersRBACTestCase, self).setUp() # Register packs if self.register_packs: self._register_packs() #", "'method': 'PUT', 'payload': { 'foo': 'bar' } }, # Sensors", "# Licensed to the StackStorm, Inc ('StackStorm') under one or", "True }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'GET' }, { 'path':", "this file to You under the Apache License, Version 2.0", "'payload': MOCK_RUNNER_1 }, # Packs { 'path': '/v1/packs', 'method': 'GET',", "action }, { 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'DELETE' #", "'path': '/v1/executions/%s/output' % (execution_model.id), 'method': 'GET' }, { 'path': 
'/v1/executions',", "}, # Webhook { 'path': '/v1/webhooks/st2', 'method': 'POST', 'payload': {", "MOCK_RUNNER_1 }, # Packs { 'path': '/v1/packs', 'method': 'GET', 'is_getall':", "expect_errors=True) elif endpoint['method'] == 'DELETE': return self.app.delete(endpoint['path'], expect_errors=True) else: raise", "{ 'path': '/v1/service_registry/groups', 'method': 'GET', 'is_getall': True }, { 'path':", "{ 'path': '/v1/rbac/roles/admin', 'method': 'GET' }, # RBAC - user", "{ 'path': '/v1/actionalias', 'method': 'POST', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path':", "# Pack management { 'path': '/v1/packs/install', 'method': 'POST', 'payload': {'packs':", "# Service registry { 'path': '/v1/service_registry/groups', 'method': 'GET', 'is_getall': True", "in supported_endpoints: if not endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path']", "'/v1/traces', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/traces/%s' %", "self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'],", "APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client = six.moves.http_client __all__ =", "'/v1/timers/%s' % (timer_model.id), 'method': 'GET' }, # Webhooks { 'path':", "True }, { 'path': '/v1/packs/dummy_pack_1', 'method': 'GET' }, # Pack", "'{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' } }, 'description': '' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase):", "permissions response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) # Other files should", "'path': '/v1/service_registry/groups', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/service_registry/groups/mock_service/members',", "self.use_user(self.users['no_permissions']) for endpoint in supported_endpoints: response = 
self._perform_request_for_endpoint(endpoint=endpoint) msg =", "expect_errors=True) else: raise ValueError('Unsupported method: %s' % (endpoint['method'])) return response", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "Insert mock objects - those objects are used to test", "self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, http_client.OK) # Other files should return forbidden response", "agreements. See the NOTICE file distributed with # this work", "'GET', 'is_getall': True }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'GET' },", "UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def", "{ 'path': '/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' }, # Pack configs {", "'' } class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class which hits all", "True, 'entry_point': '/tmp/test/action2.py', 'runner_type': 'local-shell-script', 'parameters': { 'c': {'type': 'string',", "{ 'path': '/v1/packs/dummy_pack_1', 'method': 'GET' }, # Pack management {", "st2common.services import triggers as trigger_service with mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()): from", "'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ]) MOCK_RUNNER_1 =", "'command': 'foo bar ponies', 'user': 'channel', 'source_channel': 'bar'} }, #", "{ 'path': '/v1/actionalias/aliases.alias1', 'method': 'GET' }, { 'path': '/v1/actionalias', 'method':", "= True fixtures_loader = FixturesLoader() coordinator = None @classmethod def", "'GET', 'is_getall': True }, { 'path': '/v1/traces/%s' % (trace_model.id), 'method':", "'pack': 'examples', 'description': 'test description', 
'enabled': True, 'entry_point': '/tmp/test/action2.py', 'runner_type':", "{ 'path': '/v1/configs/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/configs/dummy_pack_1', 'method':", "{ 'path': '/v1/rbac/permission_types', 'method': 'GET', 'is_getall': True }, { 'path':", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']),", "'some': 'thing' } } }, # Traces { 'path': '/v1/traces',", "'generic' TEST_FIXTURES = OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions',", "'GET', 'is_getall': True }, { 'path': '/v1/service_registry/groups/mock_service/members', 'method': 'GET', 'is_getall':", "'bar'} }, # Webhook { 'path': '/v1/webhooks/st2', 'method': 'POST', 'payload':", "self.assertEqual(response.status_code, http_client.OK) # Other files should return forbidden response =", "'POST': return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'PUT': return", "= OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']),", "'position': 0}, 'd': {'type': 'string', 'default': 'D1', 'immutable': True} }", "{ 'path': '/v1/aliasexecution', 'method': 'POST', 'payload': {'name': 'alias1', 'format': 'foo", "if not endpoint.get('is_getall', False): continue response = self.app.get(endpoint['path'] + '?limit=-1')", "0}, 'd': {'type': 'string', 'default': 'D1', 'immutable': True} } }", "'foo bar ponies', 'command': 'foo bar ponies', 'user': 'channel', 'source_channel':", "['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']), ('aliases', ['alias1.yaml']), ('triggers', ['trigger1.yaml', 'cron1.yaml']), ('rules',", "@classmethod def setUpClass(cls): tests_config.parse_args(coordinator_noop=True) 
super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False) #", "Pack config schemas { 'path': '/v1/config_schemas', 'method': 'GET', 'is_getall': True", "RBAC - roles { 'path': '/v1/rbac/roles', 'method': 'GET', 'is_getall': True", "{'action': 'core.local'} # schedule execution / run action }, {", "'method': 'POST', 'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/register', 'method':", "'path': '/v1/executions/%s' % (execution_model.id), 'method': 'DELETE' # stop execution },", "Register packs if self.register_packs: self._register_packs() # Insert mock objects -", "] self.use_user(self.users['no_permissions']) for endpoint in supported_endpoints: response = self._perform_request_for_endpoint(endpoint=endpoint) msg", "the License for the specific language governing permissions and #", "True }, { 'path': '/v1/rbac/roles/admin', 'method': 'GET' }, # RBAC", "import UserRoleAssignment from st2common.models.db.rbac import UserRoleAssignmentDB from st2common.service_setup import register_service_in_service_registry", "{ 'path': '/v1/executions', 'method': 'POST', 'payload': {'action': 'core.local'} # schedule", "See the NOTICE file distributed with # this work for", "sure API returns access denied. \"\"\" register_packs = True fixtures_loader", "'d': {'type': 'string', 'default': 'D1', 'immutable': True} } } MOCK_ACTION_ALIAS_1", "returns access denied. \"\"\" register_packs = True fixtures_loader = FixturesLoader()", "'path': '/v1/actions', 'method': 'POST', 'payload': MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1',", "'POST', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'PUT', 'payload':", "either express or implied. 
# See the License for the", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref),", "'GET' }, { 'path': '/v1/timers/%s' % (timer_model.id), 'method': 'GET' },", "'GET', 'is_getall': True } ] self.use_user(self.users['no_permissions']) for endpoint in supported_endpoints:", "import six import mock from st2common.services import triggers as trigger_service", "OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']), ('actions', ['action1.yaml', 'local.yaml']), ('aliases',", "'get_triggers_for_hook', mock.MagicMock( return_value=[DUMMY_TRIGGER_DICT])) def test_api_endpoints_behind_rbac_wall(self): # alias_model = self.models['aliases']['alias1.yaml'] sensor_model", "'GET', 'is_getall': True }, { 'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method':", "to You under the Apache License, Version 2.0 # (the", "= self.models['rules']['rule1.yaml'] enforcement_model = self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml'] trace_model =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "@classmethod def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase, self).setUp()", "class APIControllersRBACTestCase(APIControllerWithRBACTestCase): \"\"\" Test class which hits all the API", "if endpoint['method'] == 'GET': response = self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method']", "'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE' }, {", "(timer_model.id), 'method': 'GET' }, # Webhooks { 'path': '/v1/webhooks', 'method':", "or more # contributor license agreements. 
See the NOTICE file", "True }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'GET' }, { 'path':", "True }, { 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'GET' },", "'GET' }, { 'path': '/v1/executions', 'method': 'POST', 'payload': {'action': 'core.local'}", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/permission_types/action', 'method': 'GET'", "}, { 'path': '/v1/configs/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/configs/dummy_pack_1',", "You under the Apache License, Version 2.0 # (the \"License\");", "self._perform_request_for_endpoint(endpoint=endpoint) msg = '%s \"%s\" didn\\'t return 403 status code", "'parameters': { 'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' } }, 'description': ''", "== 'PUT': return self.app.put_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method'] == 'DELETE':", "coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase, self).setUp() # Register packs if self.register_packs:", "mock objects - those objects are used to test get", "six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name': 'mock_service'}, start_heart=True) @classmethod def tearDownClass(cls):", "'path': '/v1/configs/dummy_pack_1', 'method': 'GET' }, { 'path': '/v1/configs/dummy_pack_1', 'method': 'PUT',", "bar ponies', 'command': 'foo bar ponies', 'user': 'channel', 'source_channel': 'bar'}", "tests_config.parse_args(coordinator_noop=True) super(APIControllersRBACTestCase, cls).setUpClass() cls.coordinator = coordination.get_coordinator(use_cache=False) # Register mock service", "'/v1/rbac/permission_types', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rbac/permission_types/action', 'method':", "file doesn't require any permissions response = self.app.get('/v1/packs/views/file/dummy_pack_2/icon.png') self.assertEqual(response.status_code, 
http_client.OK)", "fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model) @mock.patch.object(HooksHolder, 'get_triggers_for_hook',", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client = six.moves.http_client __all__", "'method': 'GET' }, # Pack management { 'path': '/v1/packs/install', 'method':", "{ 'path': '/v1/config_schemas', 'method': 'GET', 'is_getall': True }, { 'path':", "# Traces { 'path': '/v1/traces', 'method': 'GET', 'is_getall': True },", "'c': {'type': 'string', 'default': 'C1', 'position': 0}, 'd': {'type': 'string',", "http_client.FORBIDDEN, msg) # Also test ?limit=-1 - non-admin user self.use_user(self.users['observer'])", "HooksHolder from st2common.persistence.rbac import UserRoleAssignment from st2common.models.db.rbac import UserRoleAssignmentDB from", "'aliases', 'description': 'test description', 'action_ref': 'core.local', 'formats': ['a', 'b'] }", "(rule_model.ref), 'method': 'PUT', 'payload': MOCK_RULE_1 }, { 'path': '/v1/rules/%s' %", "{ 'path': '/v1/rules/views', 'method': 'GET', 'is_getall': True }, # Service", "user role assignments { 'path': '/v1/rbac/role_assignments', 'method': 'GET', 'is_getall': True", "}, { 'path': '/v1/rules/%s' % (rule_model.ref), 'method': 'GET' }, {", "'POST', 'payload': {'name': 'alias1', 'format': 'foo bar ponies', 'command': 'foo", "The ASF licenses this file to You under the Apache", "get one, edit and delete operations self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES)", "'test description', 'action_ref': 'core.local', 'formats': ['a', 'b'] } MOCK_RULE_1 =", "}, { 'path': '/v1/executions/%s' % (execution_model.id), 'method': 'GET' }, {", "'path': 
'/v1/packs/dummy_pack_1', 'method': 'GET' }, # Pack management { 'path':", "purposes service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name': 'mock_service'}, start_heart=True)", "'method': 'GET', 'is_getall': True }, { 'path': '/v1/executions/%s' % (execution_model.id),", "}, # Action Executions { 'path': '/v1/executions', 'method': 'GET', 'is_getall':", "def test_icon_png_file_is_whitelisted(self): self.use_user(self.users['no_permissions']) # Test that access to icon.png file", "response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN) def _perform_request_for_endpoint(self, endpoint): if", "with mock.patch.object(trigger_service, 'create_trigger_type_db', mock.MagicMock()): from st2api.controllers.v1.webhooks import HooksHolder from st2common.persistence.rbac", "}, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload': MOCK_RUNNER_1 }, #", "files should return forbidden response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code, http_client.FORBIDDEN)", "start_heart=True) @classmethod def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase,", "management { 'path': '/v1/packs/install', 'method': 'POST', 'payload': {'packs': 'libcloud'} },", "from st2tests.fixturesloader import FixturesLoader from open_rbac.tests import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks", "'/v1/packs/views/files/dummy_pack_1', 'method': 'GET' }, # Pack config schemas { 'path':", "} } }, # Traces { 'path': '/v1/traces', 'method': 'GET',", "['enforcement1.yaml']), ('apikeys', ['apikey1.yaml']), ('traces', ['trace_for_test_enforce.yaml']) ]) 
MOCK_RUNNER_1 = { 'name':", "'DELETE' }, # Rule enforcements { 'path': '/v1/ruleenforcements', 'method': 'GET',", "'method': 'PUT', 'payload': MOCK_ACTION_ALIAS_1 }, { 'path': '/v1/actionalias/aliases.alias1', 'method': 'DELETE'", "'method': 'PUT', 'payload': MOCK_RUNNER_1 }, # Packs { 'path': '/v1/packs',", "}, # Timers { 'path': '/v1/timers', 'method': 'GET' }, {", "'payload': { 'foo': 'bar' } }, # Sensors { 'path':", "'source_channel': 'bar'} }, # Webhook { 'path': '/v1/webhooks/st2', 'method': 'POST',", "[ # Runners { 'path': '/v1/runnertypes', 'method': 'GET', 'is_getall': True", "= self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK, fixtures_dict=TEST_FIXTURES) self.role_assignment_db_model = UserRoleAssignmentDB( user='user', role='role', source='assignments/user.yaml') UserRoleAssignment.add_or_update(self.role_assignment_db_model)", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "(execution_model.id), 'method': 'GET' }, { 'path': '/v1/executions/%s/children' % (execution_model.id), 'method':", "- roles { 'path': '/v1/rbac/roles', 'method': 'GET', 'is_getall': True },", "elif endpoint['method'] == 'DELETE': return self.app.delete(endpoint['path'], expect_errors=True) else: raise ValueError('Unsupported", "'GET' }, # Pack config schemas { 'path': '/v1/config_schemas', 'method':", "'/v1/ruleenforcements/%s' % (enforcement_model.id), 'method': 'GET' }, # Action Executions {", "# distributed under the License is distributed on an \"AS", "'POST', 'payload': {'action': 'core.local'} # schedule execution / run action", "{ 'path': '/v1/configs', 'method': 'GET', 'is_getall': True }, { 'path':", "def tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase, self).setUp() #", "sensor_model = self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml'] enforcement_model = 
self.models['enforcements']['enforcement1.yaml'] execution_model", "# Unless required by applicable law or agreed to in", "self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints = [ # Runners {", "% (enforcement_model.id), 'method': 'GET' }, # Action Executions { 'path':", "}, { 'path': '/v1/executions/%s/re_run' % (execution_model.id), 'method': 'POST', # re-run", "endpoint['path'], response.body) self.assertEqual(response.status_code, http_client.FORBIDDEN, msg) # Also test ?limit=-1 -", "for testing purposes service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service, capabilities={'key1': 'value1', 'name':", "'method': 'GET', 'is_getall': True } ] self.use_user(self.users['no_permissions']) for endpoint in", "elif endpoint['method'] == 'POST': return self.app.post_json(endpoint['path'], endpoint['payload'], expect_errors=True) elif endpoint['method']", "mock service in the service registry for testing purposes service", "Test class which hits all the API endpoints which are", "the service registry for testing purposes service = six.binary_type(six.text_type('mock_service').encode('ascii')) register_service_in_service_registry(service=service,", "'path': '/v1/actionalias', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/actionalias/aliases.alias1',", "{ 'path': '/v1/ruleenforcements', 'method': 'GET', 'is_getall': True }, { 'path':", "'/v1/executions/%s/children' % (execution_model.id), 'method': 'GET' }, # Alias executions {", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "'trigger': 'some', 'payload': { 'some': 'thing' } } }, #", "}, # RBAC - roles { 'path': '/v1/rbac/roles', 'method': 'GET',", "views { 'path': '/v1/actions/views/overview', 'method': 'GET', 'is_getall': True }, #", "= self.models['enforcements']['enforcement1.yaml'] execution_model = self.models['executions']['execution1.yaml'] trace_model 
= self.models['traces']['trace_for_test_enforce.yaml'] timer_model =", "the License. from collections import OrderedDict import six import mock", "tearDownClass(cls): super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase, self).setUp() # Register", "'payload': {'command': 'test command string'} }, # Rules { 'path':", "False} }, # Actions { 'path': '/v1/actions', 'method': 'GET', 'is_getall':", "role assignments { 'path': '/v1/rbac/role_assignments', 'method': 'GET', 'is_getall': True },", "?limit=-1 - non-admin user self.use_user(self.users['observer']) for endpoint in supported_endpoints: if", "from open_rbac.tests import APIControllerWithRBACTestCase from tests.unit.controllers.v1.test_webhooks import DUMMY_TRIGGER_DICT http_client =", "'/v1/packs/views/file/dummy_pack_1/pack.yaml', 'method': 'GET' }, # Pack configs { 'path': '/v1/configs',", "'GET' }, # Action Executions { 'path': '/v1/executions', 'method': 'GET',", "License. 
You may obtain a copy of the License at", "'path': '/v1/executions/%s/children' % (execution_model.id), 'method': 'GET' }, # Alias executions", "{ 'path': '/v1/timers', 'method': 'GET' }, { 'path': '/v1/timers/%s' %", "{ 'path': '/v1/executions/%s/output' % (execution_model.id), 'method': 'GET' }, { 'path':", "You may obtain a copy of the License at #", "execution / run action }, { 'path': '/v1/executions/%s' % (execution_model.id),", "endpoint in supported_endpoints: if not endpoint.get('is_getall', False): continue response =", "}, { 'path': '/v1/executions/%s/children' % (execution_model.id), 'method': 'GET' }, #", "Other files should return forbidden response = self.app.get('/v1/packs/views/file/dummy_pack_2/pack.yaml', expect_errors=True) self.assertEqual(response.status_code,", "super(APIControllersRBACTestCase, cls).tearDownClass() coordination.coordinator_teardown(cls.coordinator) def setUp(self): super(APIControllersRBACTestCase, self).setUp() # Register packs", "'path': '/v1/rules', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rules/%s'", "'cron1.yaml']), ('rules', ['rule1.yaml']), ('triggertypes', ['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml',", "'/v1/rules', 'method': 'GET', 'is_getall': True }, { 'path': '/v1/rules/%s' %", "# schedule execution / run action }, { 'path': '/v1/executions/%s'", "'is_getall': True }, { 'path': '/v1/rbac/roles/admin', 'method': 'GET' }, #", "'ref': 'sixpack.st2.test.action', 'parameters': { 'ip2': '{{rule.k1}}', 'ip1': '{{trigger.t1_p}}' } },", "response = self.app.get(endpoint['path'], expect_errors=True) elif endpoint['method'] == 'POST': return self.app.post_json(endpoint['path'],", "one or more # contributor license agreements. See the NOTICE", "work for additional information regarding copyright ownership. 
# The ASF", "'description': 'test', 'enabled': False } MOCK_ACTION_1 = { 'name': 'ma.dummy.action',", "self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml'] enforcement_model = self.models['enforcements']['enforcement1.yaml']", "{ 'path': '/v1/sensortypes/%s' % (sensor_model.ref), 'method': 'GET' }, { 'path':", "(trace_model.id), 'method': 'GET' }, # Timers { 'path': '/v1/timers', 'method':", "'GET' }, { 'path': '/v1/executions/%s/children' % (execution_model.id), 'method': 'GET' },", "['triggertype1.yaml']), ('executions', ['execution1.yaml']), ('liveactions', ['liveaction1.yaml', 'parentliveaction.yaml', 'childliveaction.yaml']), ('enforcements', ['enforcement1.yaml']), ('apikeys',", "'GET' }, { 'path': '/v1/runnertypes/test-runner-1', 'method': 'PUT', 'payload': MOCK_RUNNER_1 },", "{'parameters': {}} }, # Action execution nested controllers { 'path':", "'method': 'GET' }, # Webhooks { 'path': '/v1/webhooks', 'method': 'GET'", "'GET' }, # RBAC - user role assignments { 'path':", "= 'generic' TEST_FIXTURES = OrderedDict([ ('runners', ['testrunner1.yaml', 'run-local.yaml']), ('sensors', ['sensor1.yaml']),", "st2common.models.db.rbac import UserRoleAssignmentDB from st2common.service_setup import register_service_in_service_registry from st2common.services import", "alias_model = self.models['aliases']['alias1.yaml'] sensor_model = self.models['sensors']['sensor1.yaml'] rule_model = self.models['rules']['rule1.yaml'] enforcement_model", "{'query': 'cloud'} }, { 'path': '/v1/packs/index/health', 'method': 'GET' }, #", "'GET' }, { 'path': '/v1/actions', 'method': 'POST', 'payload': MOCK_ACTION_1 },", "Executions { 'path': '/v1/executions', 'method': 'GET', 'is_getall': True }, {", "'method': 'POST', 'payload': {'packs': 'libcloud'} }, { 'path': '/v1/packs/uninstall', 'method':", "% (sensor_model.ref), 'method': 'GET' }, { 'path': '/v1/sensortypes/%s' % (sensor_model.ref),", "}, { 
'path': '/v1/actionalias/match', 'method': 'POST', 'payload': {'command': 'test command", "'name': 'ma.dummy.action', 'pack': 'examples', 'description': 'test description', 'enabled': True, 'entry_point':", "{ 'path': '/v1/rbac/role_assignments/%s' % (self.role_assignment_db_model['id']), 'method': 'GET' }, # RBAC", "msg) # Also test ?limit=-1 - admin user self.use_user(self.users['admin']) for", "{ 'path': '/v1/actions/wolfpack.action-1', 'method': 'PUT', 'payload': MOCK_ACTION_1 }, { 'path':", "= self.models['traces']['trace_for_test_enforce.yaml'] timer_model = self.models['triggers']['cron1.yaml'] supported_endpoints = [ # Runners", "False): continue response = self.app.get(endpoint['path'] + '?limit=-1', expect_errors=True) msg =", "import config as tests_config from st2tests.fixturesloader import FixturesLoader from open_rbac.tests", "MOCK_ACTION_1 }, { 'path': '/v1/actions/wolfpack.action-1', 'method': 'DELETE' }, # Action" ]
[ "assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json()", "myabtest --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply", "k8s_utils import * def wait_for_shutdown(deploymentName): ret = run(\"kubectl get deploy/\"+deploymentName,", "myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST)", "script with 4 API methods def test_single_model(self): run('cd my-model &&", "time import subprocess from subprocess import run,Popen from seldon_utils import", "r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model", "== 1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r)", "ks generate seldon-serve-simple-v1alpha2 mymodel --image seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks", "ks component rm mymodel', shell=True) run('kubectl delete sdep --all', shell=True)", "MAB Test model helm script with 4 API methods def", "wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json()", "= grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete default -c", "== 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = 
rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res", "singe model helm script with 4 API methods def test_single_model(self):", "--oauthSecret=oauth-secret && ks apply default -c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\")", "status deploy/\"+deploymentName, shell=True) def initial_rest_request(): r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not", "grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete default && ks", "deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName): ret = run(\"kubectl rollout status deploy/\"+deploymentName,", "r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): # Test singe model helm", "--image seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c mymodel',", "subprocess from subprocess import run,Popen from seldon_utils import * from", "mymodel && ks component rm mymodel', shell=True) # Test AB", "shell=True) while ret.returncode == 0: time.sleep(1) ret = run(\"kubectl get", "= run(\"kubectl get deploy/\"+deploymentName, shell=True) while ret.returncode == 0: time.sleep(1)", "methods def test_mab_model(self): run('cd my-model && ks delete default &&", "&& ks generate seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key", "shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST)", "ks delete default && ks component rm mymab', shell=True) run('kubectl", "r.json() print(res) assert r.status_code == 200 
assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1", "r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR)", "200: time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class", "&& ks delete default -c myabtest && ks component rm", "with 4 API methods def test_mab_model(self): run('cd my-model && ks", "ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) while ret.returncode >", "--oauthSecret=oauth-secret && ks apply default -c mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\")", "ks delete default && ks component rm mymodel', shell=True) run('kubectl", "from subprocess import run,Popen from seldon_utils import * from k8s_utils", "&& ks apply default -c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\")", "seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c", "run(\"kubectl get deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName): ret = run(\"kubectl rollout", "wait_for_shutdown(deploymentName): ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) while ret.returncode ==", "shell=True) # Test AB Test model helm script with 4", "200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res =", "delete default && ks component rm mymodel', shell=True) run('kubectl delete", "rm myabtest', shell=True) # Test MAB Test model helm script", "assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r =", "run(\"kubectl 
rollout status deploy/\"+deploymentName, shell=True) while ret.returncode > 0: time.sleep(1)", "rollout status deploy/\"+deploymentName, shell=True) def initial_rest_request(): r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if", "--imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c myabtest',", "my-model && ks delete default -c mymodel && ks component", "seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks", "= initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json() print(res) assert", "Test MAB Test model helm script with 4 API methods", "== 1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r)", "my-model && ks delete default -c myabtest && ks component", "wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res", "time.sleep(1) ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) def initial_rest_request():", "myabtest', shell=True) # Test MAB Test model helm script with", "rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code == 200 assert", "seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c mymodel', shell=True,", "200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r", "helm script with 4 API methods def test_single_model(self): run('cd my-model", "model helm script with 4 
API methods def test_single_model(self): run('cd", "script with 4 API methods def test_abtest_model(self): run('cd my-model &&", "helm script with 4 API methods def test_abtest_model(self): run('cd my-model", "import time import subprocess from subprocess import run,Popen from seldon_utils", "ks apply default -c mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r =", "run('kubectl delete sdep --all', shell=True) run('cd my-model && ks generate", "apply default -c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request()", "4 API methods def test_single_model(self): run('cd my-model && ks delete", "= rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(5) r =", "time.sleep(1) ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName): ret", "wait_for_rollout(deploymentName): ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) while ret.returncode", "--all', shell=True) run('cd my-model && ks generate seldon-mab-v1alpha2 mymab --imageA", "def test_single_model(self): run('cd my-model && ks delete default && ks", "shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res", "shell=True) run('cd my-model && ks generate seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0", "model helm script with 4 API methods def test_mab_model(self): run('cd", "ks generate seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret", "AB Test model helm script with 4 API methods def", "generate seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 
--oauthKey=oauth-key --oauthSecret=oauth-secret &&", "== 1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json() print(res) assert", "&& ks apply default -c mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r", "mymab --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply", "run('cd my-model && ks generate seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0 --imageB", "&& ks delete default -c mymodel && ks component rm", "shell=True) while ret.returncode > 0: time.sleep(1) ret = run(\"kubectl rollout", "r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR)", "run('cd my-model && ks generate seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0 --imageB", "rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST)", "r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR)", "r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR)", "sdep --all', shell=True) run('cd my-model && ks generate seldon-serve-simple-v1alpha2 mymodel", "1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd", "while ret.returncode == 0: time.sleep(1) ret = run(\"kubectl get deploy/\"+deploymentName,", "def wait_for_shutdown(deploymentName): ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) while ret.returncode", "methods def test_single_model(self): run('cd 
my-model && ks delete default &&", "generate seldon-serve-simple-v1alpha2 mymodel --image seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply", "= rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): # Test", "assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r =", "import pytest import time import subprocess from subprocess import run,Popen", "model helm script with 4 API methods def test_abtest_model(self): run('cd", "print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete", "delete sdep --all', shell=True) run('cd my-model && ks generate seldon-abtest-v1alpha2", "== 0: time.sleep(1) ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) def", "delete sdep --all', shell=True) run('cd my-model && ks generate seldon-mab-v1alpha2", "@pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): # Test singe model helm script with", "r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(5) r", "&& ks component rm myabtest', shell=True) # Test MAB Test", "# Test singe model helm script with 4 API methods", "&& ks component rm mymodel', shell=True) run('kubectl delete sdep --all',", "--all', shell=True) run('cd my-model && ks generate seldon-serve-simple-v1alpha2 mymodel --image", "run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) def initial_rest_request(): r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST)", "default && ks component rm mymab', shell=True) run('kubectl delete sdep", "assert 
len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json()", "check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res =", "== 1 r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json() print(res) assert", "import subprocess from subprocess import run,Popen from seldon_utils import *", "shell=True) run('cd my-model && ks generate seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0", "myabtest && ks component rm myabtest', shell=True) # Test MAB", "r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json() print(res)", "r.status_code == 200: time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code", "= run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) while ret.returncode > 0:", "with 4 API methods def test_abtest_model(self): run('cd my-model && ks", "my-model && ks delete default && ks component rm mymodel',", "grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks", "r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR)", "from seldon_utils import * from k8s_utils import * def wait_for_shutdown(deploymentName):", "0: time.sleep(1) ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName):", "= grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model &&", 
"ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName): ret =", "rm mymodel', shell=True) run('kubectl delete sdep --all', shell=True) run('cd my-model", "--oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\")", "if not r.status_code == 200: time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if", "-c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request() r =", "mymab', shell=True) run('kubectl delete sdep --all', shell=True) run('cd my-model &&", "my-model && ks generate seldon-serve-simple-v1alpha2 mymodel --image seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret", "my-model && ks generate seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0", "assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json()", "r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model", "run('cd my-model && ks generate seldon-serve-simple-v1alpha2 mymodel --image seldonio/mock_classifier:1.0 --oauthKey=oauth-key", "--all', shell=True) run('cd my-model && ks generate seldon-abtest-v1alpha2 myabtest --imageA", "rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code == 200 assert", "r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR)", "ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) def initial_rest_request(): r", "delete default -c myabtest && ks component 
rm myabtest', shell=True)", "print(r) run('cd my-model && ks delete default && ks component", "== 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r)", "assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r =", "grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks", "r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model", "ks delete default -c mymodel && ks component rm mymodel',", "res = r.json() print(res) assert r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"])", "def test_mab_model(self): run('cd my-model && ks delete default && ks", "grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete default -c myabtest", "from k8s_utils import * def wait_for_shutdown(deploymentName): ret = run(\"kubectl get", "* def wait_for_shutdown(deploymentName): ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) while", "my-model && ks delete default && ks component rm mymab',", "def initial_rest_request(): r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200:", "apply default -c mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request()", "ret.returncode == 0: time.sleep(1) ret = run(\"kubectl get deploy/\"+deploymentName, shell=True)", "ks component rm myabtest', shell=True) # Test MAB Test model", "200 assert 
len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r", "return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): # Test singe model", "&& ks delete default && ks component rm mymodel', shell=True)", "len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC)", "wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json()", "> 0: time.sleep(1) ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True)", "seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c mymab', shell=True)", "wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res =", "Test AB Test model helm script with 4 API methods", "shell=True) def wait_for_rollout(deploymentName): ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True)", "ks apply default -c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r =", "run('cd my-model && ks delete default -c mymodel && ks", "run('cd my-model && ks delete default -c myabtest && ks", "def wait_for_rollout(deploymentName): ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) while", "r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code ==", "deploy/\"+deploymentName, shell=True) def initial_rest_request(): r = 
rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code", "assert r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r =", "wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json()", "generate seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret &&", "= grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete default &&", "import * def wait_for_shutdown(deploymentName): ret = run(\"kubectl get deploy/\"+deploymentName, shell=True)", "= run(\"kubectl get deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName): ret = run(\"kubectl", "-c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request() r", "Test model helm script with 4 API methods def test_abtest_model(self):", "default -c mymodel && ks component rm mymodel', shell=True) #", "run,Popen from seldon_utils import * from k8s_utils import * def", "shell=True) # Test MAB Test model helm script with 4", "print(r) run('cd my-model && ks delete default -c myabtest &&", "get deploy/\"+deploymentName, shell=True) while ret.returncode == 0: time.sleep(1) ret =", "status deploy/\"+deploymentName, shell=True) while ret.returncode > 0: time.sleep(1) ret =", "mymodel', shell=True) run('kubectl delete sdep --all', shell=True) run('cd my-model &&", "time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(5)", "print(r) run('cd my-model && ks delete default -c mymodel &&", "delete sdep --all', shell=True) run('cd my-model && ks 
generate seldon-serve-simple-v1alpha2", "1 r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code", "rollout status deploy/\"+deploymentName, shell=True) while ret.returncode > 0: time.sleep(1) ret", "grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete default -c mymodel", "rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code == 200 assert", "ks generate seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret", "r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(1) r", "ks apply default -c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r", "rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST)", "test_single_model(self): run('cd my-model && ks delete default && ks component", "r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json() print(res) assert r.status_code ==", "if not r.status_code == 200: time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return", "deploy/\"+deploymentName, shell=True) while ret.returncode == 0: time.sleep(1) ret = run(\"kubectl", "ks component rm mymab', shell=True) run('kubectl delete sdep --all', shell=True)", "len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r = 
grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC)", "with 4 API methods def test_single_model(self): run('cd my-model && ks", "mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST)", "component rm mymab', shell=True) run('kubectl delete sdep --all', shell=True) run('cd", "import * from k8s_utils import * def wait_for_shutdown(deploymentName): ret =", "component rm mymodel', shell=True) run('kubectl delete sdep --all', shell=True) run('cd", "component rm myabtest', shell=True) # Test MAB Test model helm", "test_mab_model(self): run('cd my-model && ks delete default && ks component", "== 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res", "helm script with 4 API methods def test_mab_model(self): run('cd my-model", "1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd", "1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code", "len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json() print(res)", "&& ks apply default -c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r", "r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks delete default", "component rm mymodel', shell=True) # Test AB Test model helm", "len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) 
print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC)", "200: time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200:", "shell=True) run('kubectl delete sdep --all', shell=True) run('cd my-model && ks", "ret.returncode > 0: time.sleep(1) ret = run(\"kubectl rollout status deploy/\"+deploymentName,", "len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json() print(res)", "# Test AB Test model helm script with 4 API", "default && ks component rm mymodel', shell=True) run('kubectl delete sdep", "200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res =", "200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r", "200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res =", "shell=True) def initial_rest_request(): r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code ==", "--oauthSecret=oauth-secret && ks apply default -c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\")", "# Test MAB Test model helm script with 4 API", "shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res", "def test_abtest_model(self): run('cd my-model && ks delete default && ks", "&& ks delete default && ks component rm mymab', shell=True)", "r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code ==", "= 
rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code == 200", "* from k8s_utils import * def wait_for_shutdown(deploymentName): ret = run(\"kubectl", "ks delete default -c myabtest && ks component rm myabtest',", "pytest import time import subprocess from subprocess import run,Popen from", "default -c myabtest && ks component rm myabtest', shell=True) #", "test_abtest_model(self): run('cd my-model && ks delete default && ks component", "len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json() print(res)", "= rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json() print(res) assert r.status_code == 200", "= run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) def initial_rest_request(): r =", "= grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model &&", "&& ks generate seldon-mab-v1alpha2 mymab --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key", "run('cd my-model && ks delete default && ks component rm", "-c myabtest && ks component rm myabtest', shell=True) # Test", "class TestSingleNamespace(object): # Test singe model helm script with 4", "= grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model &&", "wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res =", "== 1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r = 
grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r)", "4 API methods def test_abtest_model(self): run('cd my-model && ks delete", "rm mymab', shell=True) run('kubectl delete sdep --all', shell=True) run('cd my-model", "rm mymodel', shell=True) # Test AB Test model helm script", "default -c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request()", "run(\"kubectl get deploy/\"+deploymentName, shell=True) while ret.returncode == 0: time.sleep(1) ret", "subprocess import run,Popen from seldon_utils import * from k8s_utils import", "@pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): # Test singe model helm script", "r = rest_request_ambassador(\"mymab\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code ==", "--imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c mymab',", "delete default && ks component rm mymab', shell=True) run('kubectl delete", "= r.json() print(res) assert r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) ==", "shell=True) run('cd my-model && ks generate seldon-serve-simple-v1alpha2 mymodel --image seldonio/mock_classifier:1.0", "rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): # Test singe", "--imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default", "seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0 --oauthKey=oauth-key 
--oauthSecret=oauth-secret && ks", "ks component rm mymodel', shell=True) # Test AB Test model", "API methods def test_single_model(self): run('cd my-model && ks delete default", "my-model && ks generate seldon-abtest-v1alpha2 myabtest --imageA seldonio/mock_classifier:1.0 --imageB seldonio/mock_classifier:1.0", "methods def test_abtest_model(self): run('cd my-model && ks delete default &&", "seldon_utils import * from k8s_utils import * def wait_for_shutdown(deploymentName): ret", "Test singe model helm script with 4 API methods def", "not r.status_code == 200: time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not", "== 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"myabtest\",None,API_AMBASSADOR) print(r)", "sdep --all', shell=True) run('cd my-model && ks generate seldon-abtest-v1alpha2 myabtest", "== 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res", "deploy/\"+deploymentName, shell=True) while ret.returncode > 0: time.sleep(1) ret = run(\"kubectl", "== 1 r = rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json() print(res) assert", "seldon-serve-simple-v1alpha2 mymodel --image seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default", "1 r = rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code", "API methods def test_mab_model(self): run('cd my-model && ks delete default", "== 200: time.sleep(1) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code ==", "while ret.returncode > 0: time.sleep(1) ret = run(\"kubectl rollout status", "print(res) assert r.status_code == 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r", "0: time.sleep(1) ret = run(\"kubectl rollout status deploy/\"+deploymentName, shell=True) def", 
"grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd my-model && ks", "= rest_request_ambassador(\"myabtest\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code == 200", "mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r = initial_rest_request() r =", "API methods def test_abtest_model(self): run('cd my-model && ks delete default", "apply default -c mymab', shell=True) wait_for_rollout(\"mymab-mymab-41de5b8\") wait_for_rollout(\"mymab-mymab-b8038b2\") wait_for_rollout(\"mymab-mymab-df66c5c\") r =", "Test model helm script with 4 API methods def test_mab_model(self):", "= rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(1) r =", "== 200: time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\")", "default -c mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request() r", "script with 4 API methods def test_mab_model(self): run('cd my-model &&", "-c mymodel', shell=True, check=True) wait_for_rollout(\"mymodel-mymodel-025d03d\") r = initial_rest_request() r =", "not r.status_code == 200: time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r", "initial_rest_request() r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json() print(res) assert r.status_code", "= rest_request_ambassador(\"mymodel\",None,API_AMBASSADOR) res = r.json() print(res) assert r.status_code == 200", "&& ks generate seldon-serve-simple-v1alpha2 mymodel --image 
seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret &&", "seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c myabtest', shell=True)", "delete default -c mymodel && ks component rm mymodel', shell=True)", "mymodel --image seldonio/mock_classifier:1.0 --oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c", "import run,Popen from seldon_utils import * from k8s_utils import *", "4 API methods def test_mab_model(self): run('cd my-model && ks delete", "&& ks component rm mymab', shell=True) run('kubectl delete sdep --all',", "-c mymodel && ks component rm mymodel', shell=True) # Test", "get deploy/\"+deploymentName, shell=True) def wait_for_rollout(deploymentName): ret = run(\"kubectl rollout status", "time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object):", "sdep --all', shell=True) run('cd my-model && ks generate seldon-mab-v1alpha2 mymab", "--oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c mymodel', shell=True, check=True)", "r.status_code == 200: time.sleep(5) r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\")", "initial_rest_request(): r = rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) if not r.status_code == 200: time.sleep(1)", "== 200 assert len(r.json()[\"data\"][\"tensor\"][\"values\"]) == 1 r = grpc_request_ambassador2(\"mymodel\",None,API_AMBASSADOR) print(r)", "1 r = grpc_request_ambassador2(\"mymab\",None,API_AMBASSADOR) print(r) r = grpc_request_api_gateway2(\"oauth-key\",\"oauth-secret\",None,rest_endpoint=API_GATEWAY_REST,grpc_endpoint=API_GATEWAY_GRPC) print(r) run('cd", "r = 
rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) return r @pytest.mark.usefixtures(\"seldon_java_images\") @pytest.mark.usefixtures(\"single_namespace_seldon_ksonnet\") class TestSingleNamespace(object): #", "default -c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\") wait_for_rollout(\"myabtest-myabtest-df66c5c\") r = initial_rest_request() r", "--oauthKey=oauth-key --oauthSecret=oauth-secret && ks apply default -c myabtest', shell=True) wait_for_rollout(\"myabtest-myabtest-41de5b8\")", "rest_request_api_gateway(\"oauth-key\",\"oauth-secret\",None,API_GATEWAY_REST) res = r.json() print(res) assert r.status_code == 200 assert", "&& ks component rm mymodel', shell=True) # Test AB Test", "ret = run(\"kubectl get deploy/\"+deploymentName, shell=True) while ret.returncode == 0:", "TestSingleNamespace(object): # Test singe model helm script with 4 API", "mymodel', shell=True) # Test AB Test model helm script with" ]
[ "# proxy module from __future__ import absolute_import from envisage.safeweakref import", "<reponame>enthought/etsproxy # proxy module from __future__ import absolute_import from envisage.safeweakref", "proxy module from __future__ import absolute_import from envisage.safeweakref import *" ]
[ "models.ManyToManyField('Genre') class Author(models.Model): name = models.CharField(max_length=50) nationality = models.ForeignKey('Nation', null=True)", "name = models.CharField(max_length=50) nationality = models.ForeignKey('Nation', null=True) class Genre(models.Model): name", "= models.CharField(max_length=50) publication_date = models.DateTimeField() author = models.ForeignKey('Author') genres =", "django.db import models from .query import BookQuerySet class Book(models.Model): objects", "from .query import BookQuerySet class Book(models.Model): objects = BookQuerySet.as_manager() title", "models from .query import BookQuerySet class Book(models.Model): objects = BookQuerySet.as_manager()", "= models.ForeignKey('Author') genres = models.ManyToManyField('Genre') class Author(models.Model): name = models.CharField(max_length=50)", "models.CharField(max_length=50) nationality = models.ForeignKey('Nation', null=True) class Genre(models.Model): name = models.CharField(max_length=50)", "nationality = models.ForeignKey('Nation', null=True) class Genre(models.Model): name = models.CharField(max_length=50) class", "Book(models.Model): objects = BookQuerySet.as_manager() title = models.CharField(max_length=50) publication_date = models.DateTimeField()", "import models from .query import BookQuerySet class Book(models.Model): objects =", "class Genre(models.Model): name = models.CharField(max_length=50) class Nation(models.Model): name = models.CharField(max_length=50)", "models.DateTimeField() author = models.ForeignKey('Author') genres = models.ManyToManyField('Genre') class Author(models.Model): name", "objects = BookQuerySet.as_manager() title = models.CharField(max_length=50) publication_date = models.DateTimeField() author", "publication_date = models.DateTimeField() author = models.ForeignKey('Author') genres = models.ManyToManyField('Genre') class", ".query import BookQuerySet class Book(models.Model): objects = BookQuerySet.as_manager() title =", "= 
models.CharField(max_length=50) nationality = models.ForeignKey('Nation', null=True) class Genre(models.Model): name =", "author = models.ForeignKey('Author') genres = models.ManyToManyField('Genre') class Author(models.Model): name =", "models.CharField(max_length=50) publication_date = models.DateTimeField() author = models.ForeignKey('Author') genres = models.ManyToManyField('Genre')", "Genre(models.Model): name = models.CharField(max_length=50) class Nation(models.Model): name = models.CharField(max_length=50) demonym", "= models.ForeignKey('Nation', null=True) class Genre(models.Model): name = models.CharField(max_length=50) class Nation(models.Model):", "= BookQuerySet.as_manager() title = models.CharField(max_length=50) publication_date = models.DateTimeField() author =", "import BookQuerySet class Book(models.Model): objects = BookQuerySet.as_manager() title = models.CharField(max_length=50)", "BookQuerySet class Book(models.Model): objects = BookQuerySet.as_manager() title = models.CharField(max_length=50) publication_date", "= models.ManyToManyField('Genre') class Author(models.Model): name = models.CharField(max_length=50) nationality = models.ForeignKey('Nation',", "= models.CharField(max_length=50) class Nation(models.Model): name = models.CharField(max_length=50) demonym = models.CharField(max_length=50)", "class Author(models.Model): name = models.CharField(max_length=50) nationality = models.ForeignKey('Nation', null=True) class", "from django.db import models from .query import BookQuerySet class Book(models.Model):", "name = models.CharField(max_length=50) class Nation(models.Model): name = models.CharField(max_length=50) demonym =", "class Book(models.Model): objects = BookQuerySet.as_manager() title = models.CharField(max_length=50) publication_date =", "genres = models.ManyToManyField('Genre') class Author(models.Model): name = models.CharField(max_length=50) nationality =", "models.ForeignKey('Author') genres = models.ManyToManyField('Genre') 
class Author(models.Model): name = models.CharField(max_length=50) nationality", "title = models.CharField(max_length=50) publication_date = models.DateTimeField() author = models.ForeignKey('Author') genres", "BookQuerySet.as_manager() title = models.CharField(max_length=50) publication_date = models.DateTimeField() author = models.ForeignKey('Author')", "Author(models.Model): name = models.CharField(max_length=50) nationality = models.ForeignKey('Nation', null=True) class Genre(models.Model):", "null=True) class Genre(models.Model): name = models.CharField(max_length=50) class Nation(models.Model): name =", "= models.DateTimeField() author = models.ForeignKey('Author') genres = models.ManyToManyField('Genre') class Author(models.Model):", "models.ForeignKey('Nation', null=True) class Genre(models.Model): name = models.CharField(max_length=50) class Nation(models.Model): name" ]
[ "- keep_prob) * width**2 / dropblock_size**2 / (width - dropblock_size", "keepdims=True) else: ksize = [1, 1, dropblock_size, dropblock_size] block_pattern =", "C]) if data_format == 'channels_last' else tf.reshape(cam, [N, C, height,", "code from tensorpack.tfutils.tower import get_current_tower_context from tensorpack.models import GlobalAvgPooling, FullyConnected", "DropBlock. \"None\" means no DropBlock. dropblock_size: `int` size of blocks", "tf.reshape(tf.nn.sigmoid(cam), [N, 1, height, width]) else: cam_mask = False return", "tensorpack.tfutils.compat import tfv1 as tf # this should be avoided", "- dropblock_size + 1)**2 cam_mask = _get_cam(net, label, flag, dropblock_size,", "num_of_class] ''' if data_format == 'channels_last': N, height, width, C", "method for convolutional neural networks. DropBlock is a form of", "height*width]) # # cam_mean = 1 + tf.matmul(net, weights, transpose_a=True)", "seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern = tf.cast(block_pattern, dtype=tf.float32)", "return cam_mask ctx = get_current_tower_context() is_training = bool(ctx.is_training) if not", "== 'channels_last' else tf.reshape(spt_mask, [N, 1, height, width]) # channel", "parameter of DropBlcok. seed_drop_rate = (1.0 - keep_prob) * width**2", "ksize=[1, 1, dropblock_size, dropblock_size], strides=[1, 1, 1, 1], padding='VALID', data_format='NCHW')", "width**2 / dropblock_size**2 / ( width - dropblock_size + 1)**2", "'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) # chan_mask = tf.reshape(cam, [N*C,", "= tf.gather(topk, indices=[k-1], axis=-1) # [N, 1, 1] spt_mask =", "parameter of DropBlcok. # seed_drop_rate = (1.0 - keep_prob) *", "[N, k] topk = tf.gather(topk, indices=k, axis=1) # [N, 1]", "= tf.reshape(spt_mask, [N, height, width, 1]) if data_format == 'channels_last'", "G == None: G = C // CG if CG", "else: _, C, width, height = net.get_shape().as_list() if width !=", "of input tensor with DropBlock applied. 
Raises: if width and", "1, height*width]) k = tf.cast(height*width/dropblock_size**2, tf.int32) topk, _ = tf.math.top_k(cam,", "width]) return net def CamDrop(net, keep_prob, dropblock_size, flag=None, label=None, G=None,", "valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims( valid_block_center, -1 if", "width*height] # cam_chan = tf.maximum(tf.multiply(net, weights), 0) # [N, C,", "label, flag, dropblock_size, data_format='channels_first'): ''' net: [N, C, H, W]", "# [N, C, 1, 1] cam_mask = tf.logical_or(spt_mask, chan_mask) #", "# for var, grad in zip(var_list, grads)] # # grads", "0) valid_block_center = tf.expand_dims( valid_block_center, -1 if data_format == 'channels_last'", "tf.int32) topk, _ = tf.math.top_k(cam, k=k) # [N, 1, k]", "chan_mask) # chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if data_format ==", "than dropout on convolutional layers due to the fact that", "is_training: `bool` for whether the model is training. keep_prob: `float`", "this should be avoided first in model code from tensorpack.tfutils.tower", "tf.float32) net = net / tf.cast(percent_ones, net.dtype) * tf.cast( block_pattern,", "avoided first in model code from tensorpack.tfutils.tower import get_current_tower_context from", "be avoided first in model code from tensorpack.tfutils.tower import get_current_tower_context", "C, height, width = net.get_shape().as_list() N = tf.shape(net)[0] gap_w =", "width != height: raise ValueError('Input tensor with width!=height is not", "else: ksize = [1, 1, dropblock_size, dropblock_size] block_pattern = tf.reduce_max(-block_pattern,", "the model is training. 
keep_prob: `float` or `Tensor` keep_prob parameter", ">= 1 block_pattern = tf.logical_or(block_pattern, cam_mask) block_pattern = tf.cast(block_pattern, dtype=tf.float32)", "else tf.reshape(cam, [N*C, height*width]) # chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N, height,", "0) # for depth valid_block_center = tf.expand_dims(valid_block_center, 0) # for", "G=None, CG=None, data_format='channels_first'): '''CamDrop''' def _get_cam(net, label, flag, dropblock_size, data_format='channels_first'):", "G, CG, height, width]) dropblock_size = min(dropblock_size, width) # seed_drop_rate", "means no DropBlock. dropblock_size: `int` size of blocks to be", "1, width*height] # spt_mask = tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) #", "tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern = tf.reduce_min( block_pattern,", "`float` or `Tensor` keep_prob parameter of DropBlock. \"None\" means no", "seed_drop_rate is the gamma parameter of DropBlcok. # seed_drop_rate =", "cam_mask = tf.logical_or(spt_mask, chan_mask) # chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C, height*width])", "* (width - dropblock_size + 1)**2) seed_drop_rate = (1.0 -", "shape, seed_drop_rate, eps=1e-20): # if logits == False: # return", "- keep_prob) * width**2 * G**2 / (C * dropblock_size**2)", "the input tensor are not equal. \"\"\" ctx = get_current_tower_context()", "2: group dropout; 3: group soft-dropout; 4: Uout group dropout", "tf.reshape(cam, [N, 1, height, width]) # cam = tf.nn.avg_pool(cam, ksize=[1,", "'channels_last' else tf.reshape(cam, [N*C, height*width]) # chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N,", "= net.get_shape().as_list() if width != height: raise ValueError('Input tensor with", "net: `Tensor` input tensor. 
is_training: `bool` for whether the model", "k=k+1) # [N, k] topk = tf.gather(topk, indices=k, axis=1) #", "1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) # for depth", "width, height, _ = net.get_shape().as_list() else: _, _, width, height", "blocks to be dropped by DropBlock. data_format: `str` either \"channels_first\"", "< topk) spt_mask = tf.reshape(spt_mask, [N, height, width, 1]) if", "# else: # N, C, height, width = net.get_shape().as_list() #", "group soft-dropout; 4: Uout group dropout def dropblock(net, keep_prob, dropblock_size,", "not equal. \"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training) if", "cam_mask = tf.nn.softmax(y / tau) # topk, _ = tf.math.top_k(cam_mask,", "[C, gap_C//C, num]) gap_w = tf.reduce_mean(gap_w, reduction_indices=[1]) # [C, num]", "supported.') dropblock_size = min(dropblock_size, width) # seed_drop_rate is the gamma", "# Normalizing the gradients # if data_format == 'channels_last': #", "= tf.gradients(tensor, var_list) # return [grad if grad is not", "int(dropblock_size // 2), w_i < width - (dropblock_size - 1)", "axis=[2, 3, 4], keepdims=True) else: ksize = [1, 1, dropblock_size,", "axis=-1) # [N, 1, 1] spt_mask = (cam < topk)", "= tf.pad(cam, [[0, 0], [0, 0], [left_or_top, right_or_bot], [left_or_top, right_or_bot]])", "__all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1: paper baseline; 2: group", "# # Conv layer tensor [?,2048,10,10] # def _compute_gradients(tensor, var_list):", "''' net: [N, C, H, W] gap_w : [gap_C, num_of_class]", "_, width, height = net.get_shape().as_list() if width != height: raise", "topk, _ = tf.math.top_k(label, k=k+1) # [N, k] topk =", "if not gap_w is None: # # Normalizing the gradients", "else tf.reshape(tf.nn.sigmoid(cam), [N, 1, height, width]) else: cam_mask = False", "if data_format == 'channels_last' else tf.reshape(net, [N, C, height*width]) #", "height, width = net.get_shape().as_list() N = tf.shape(net)[0] gap_w = 
tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,", "axis=[1, 2] if data_format == 'channels_last' else [2, 3], keepdims=True)", "# N = tf.shape(net)[0] # grads = _compute_gradients(cost, [net])[0] #", "== width: block_pattern = tf.reduce_min(block_pattern, axis=[2, 3, 4], keepdims=True) else:", "gap_C, num = tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C, num] gap_w =", "ValueError('Input tensor with width!=height is not supported.') dropblock_size = min(dropblock_size,", "tf.constant(1e-5)) # weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N, C] #", "inside the feature map. w_i, h_i = tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center", "mimic GN \"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training) if", "# [N, C, 1] # cam_mask = tf.expand_dims(cam_mask, 2) #", "GlobalAvgPooling, FullyConnected import tensorflow as tf __all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4']", "tf.expand_dims(weights, 2) # [N, C, 1] # net = tf.reshape(net,", "# [N, C, width*height] # cam = cam_mean*cam_chan # #", "C, 1, 1] # return cam_mask ctx = get_current_tower_context() is_training", "dropblock_size, gap_w=None, label=None, G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock: a regularization method", "tensor with width!=height is not supported.') N = tf.shape(net)[0] dropblock_size", "gap_w=None, data_format='channels_first'): # # Conv layer tensor [?,2048,10,10] # def", "label, flag, dropblock_size, data_format) # Forces the block to be", "2 # right_or_bot = left_or_top if dropblock_size % 2 ==", "= tf.reduce_min(block_pattern, axis=[2, 3, 4], keepdims=True) else: ksize = [1,", "dtype=tf.float32) + tf.cast((1 - seed_drop_rate), dtype=tf.float32) + randnoise) >= 1", "for `[batch, height, width, channels]`. 
Returns: A version of input", "percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast( tf.size(block_pattern), tf.float32) net =", "valid_block_center = tf.expand_dims(valid_block_center, 0) # for depth valid_block_center = tf.expand_dims(valid_block_center,", "dropblock_size, dropblock_size] block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize,", "tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag > 0 else None if not", "data_format == 'channels_last' else [2, 3], keepdims=True) else: if data_format", "tf.expand_dims(block_pattern, 2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net", "- 1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) # for", "cam_mask = tf.expand_dims(cam_mask, 2) # [N, C, 1, 1] #", "a form of structured dropout, where units in a contiguous", "= tf.logical_and( tf.logical_and(w_i >= int(dropblock_size // 2), w_i < width", "tf.reshape(cam, [N, 1, height*width]) k = tf.cast(height*width/dropblock_size**2, tf.int32) topk, _", "// G net = tf.reshape(net, [N, G, CG, height, width])", "\"channels_last for `[batch, height, width, channels]`. 
Returns: A version of", "# cam = cam_mean*cam_chan # # Passing through ReLU #", "# if data_format == 'channels_last': # N, height, width, C", "indices=k, axis=1) # [N, 1] topk = tf.expand_dims(topk, 1) #", "ReLU # cam = cam / tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) #", "cam_chan = tf.maximum(tf.multiply(net, weights), 0) # [N, C, width*height] #", "= (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast( (1 - seed_drop_rate),", "width, C = net.get_shape().as_list() # else: # N, C, height,", "tf.logical_and(w_i >= int(dropblock_size // 2), w_i < width - (dropblock_size", "# chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if data_format == 'channels_last'", "= (label < topk) chan_mask = tf.expand_dims(chan_mask, 2) # [N,", "h_i = tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center = tf.logical_and( tf.logical_and(w_i >= int(dropblock_size", "dropblock_size = min(dropblock_size, width) # seed_drop_rate is the gamma parameter", "if data_format == 'channels_last' else tf.reshape(net, [N, C, height, width])", "= tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast( tf.size(block_pattern), tf.float32) net = net", "supported.') N = tf.shape(net)[0] dropblock_size = min(dropblock_size, width) # seed_drop_rate", "net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': N, height, width,", "= tf.reshape(cam, [N*C, height*width]) if data_format == 'channels_last' else tf.reshape(cam,", "def dropblock(net, keep_prob, dropblock_size, gap_w=None, label=None, G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock:", "dropblock_size, G=None, CG=None, data_format='channels_first'): \"\"\" mimic GN \"\"\" ctx =", "tf.expand_dims(valid_block_center, 0) # for channel randnoise = tf.random_uniform([N, G, 1,", "data_format='channels_first'): ''' net: [N, C, H, W] gap_w : [gap_C,", "tf.cast(C/8, tf.int32) topk, _ = tf.math.top_k(label, k=k+1) # [N, k]", "2 == 1 else 
dropblock_size-left_or_top-1 # cam = tf.pad(cam, [[0,", "_ = net.get_shape().as_list() else: _, _, width, height = net.get_shape().as_list()", "parameter of DropBlock. \"None\" means no DropBlock. dropblock_size: `int` size", "tf.cast(block_pattern, net.dtype) net = tf.reshape(net, [N, height, width, C]) if", "block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NCHW')", "padding='SAME', data_format='NCHW') block_pattern = tf.expand_dims(block_pattern, 2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32)", "= tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) # cam = tf.reshape(cam, [N,", "[1, dropblock_size, dropblock_size, 1] else: ksize = [1, 1, dropblock_size,", "- (dropblock_size - 1) // 2), tf.logical_and(h_i >= int(dropblock_size //", "N = tf.shape(net)[0] gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag >", "# return cam_mask ctx = get_current_tower_context() is_training = bool(ctx.is_training) if", "tf.reshape(cam, [N, height, width, 1]) if data_format == 'channels_last' else", "C, height*width]) # # cam_mean = 1 + tf.matmul(net, weights,", "(1.0 - keep_prob) * width**2 * G**2 / (C *", "division from __future__ import print_function import re import six #", "not is_training or keep_prob is None: return net tf.logging.info('Applying DropBlock:", "# cam = tf.reshape(cam, [N, height, width, C]) if data_format", "import absolute_import from __future__ import division from __future__ import print_function", "> 0 else None if not gap_w is None: gap_w", "map. 
w_i, h_i = tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center = tf.logical_and( tf.logical_and(w_i", "tf.range(width)) valid_block_center = tf.logical_and( tf.logical_and(w_i >= int(dropblock_size // 2), w_i", "tf.pad(cam, [[0, 0], [0, 0], [left_or_top, right_or_bot], [left_or_top, right_or_bot]]) #", "# cam_mask = (cam_mask < topk) # # cam_mask =", "= (dropblock_size-1) // 2 # right_or_bot = left_or_top if dropblock_size", "label = tf.gather(tf.transpose(gap_w), label) # [N, C] # spatial weights", "chan_mask = (label < topk) chan_mask = tf.expand_dims(chan_mask, 2) #", "weights), 0) # [N, C, width*height] # cam = cam_mean*cam_chan", "# chan_mask = tf.reshape(cam, [N*C, height*width]) if data_format == 'channels_last'", "1, 1, 1], padding='VALID', data_format='NCHW') # left_or_top = (dropblock_size-1) //", "[N, 1] topk = tf.expand_dims(topk, 1) # [N, C, 1]", "block_pattern = -tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME',", "dropout on convolutional layers due to the fact that activation", "seed_drop_rate is the gamma parameter of DropBlcok. seed_drop_rate = (1.0", "[?,2048,10,10] # def _compute_gradients(tensor, var_list): # grads = tf.gradients(tensor, var_list)", "tf.reshape(net, [N, C, height*width]) # # cam_mean = 1 +", "== 'channels_last' else 0) randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern =", "be inside the feature map. w_i, h_i = tf.meshgrid(tf.range(width), tf.range(width))", "== 'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N, 1, height, width]) else: cam_mask", "[left_or_top, right_or_bot]]) # cam = tf.reshape(cam, [N, height*width, 1]) if", "dropblock_size] block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1,", "correlated. See https://arxiv.org/pdf/1810.12890.pdf for details. 
Args: net: `Tensor` input tensor.", "data_format: `str` either \"channels_first\" for `[batch, channels, height, width]` or", "== 'channels_last' else tf.reshape(cam, [N, 1, height*width]) k = tf.cast(height*width/dropblock_size**2,", ": [gap_C, num_of_class] ''' if data_format == 'channels_last': N, height,", "# return cam # def _gumbel_softmax(logits, tau, shape, seed_drop_rate, eps=1e-20):", "/ dropblock_size**2 / (width - dropblock_size + 1)**2 # Forces", "= C // CG if CG == None: CG =", "gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag > 0 else None", "else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net", "* width**2 * G**2 / (C * dropblock_size**2) / (C", "tensorpack.models import GlobalAvgPooling, FullyConnected import tensorflow as tf __all__ =", "DropBlcok. # seed_drop_rate = (1.0 - keep_prob) * width**2 *", "# right_or_bot = left_or_top if dropblock_size % 2 == 1", "= tf.math.top_k(label, k=k+1) # [N, k] topk = tf.gather(topk, indices=k,", "_compute_gradients(cost, [net])[0] # norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) +", "width, C]) if data_format == 'channels_last' else tf.reshape(cam, [N, C,", "W] gap_w : [gap_C, num_of_class] ''' if data_format == 'channels_last':", "tf __all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1: paper baseline; 2:", "data_format == 'channels_last': N, height, width, C = net.get_shape().as_list() else:", "2) # [N, C, 1] # net = tf.reshape(net, [N,", "# [N, C] # spatial weights = tf.expand_dims(label, 2) #", "w_i, h_i = tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center = tf.logical_and( tf.logical_and(w_i >=", "height, width, 1]) if data_format == 'channels_last' else tf.reshape(spt_mask, [N,", "= net.get_shape().as_list() # N = tf.shape(net)[0] # grads = _compute_gradients(cost,", "width]` or \"channels_last for 
`[batch, height, width, channels]`. Returns: A", "data_format='channels_first'): \"\"\" mimic GN \"\"\" ctx = get_current_tower_context() is_training =", "spatially correlated. See https://arxiv.org/pdf/1810.12890.pdf for details. Args: net: `Tensor` input", "for whether the model is training. keep_prob: `float` or `Tensor`", "N, C, height, width = net.get_shape().as_list() # N = tf.shape(net)[0]", "2) # [N, C, 1, 1] # return cam_mask ctx", "C, 1] # cam_mask = tf.expand_dims(cam_mask, 2) # [N, C,", "else: ksize = [1, 1, dropblock_size, dropblock_size] block_pattern = -tf.nn.max_pool(", "None: G = C // CG if CG == None:", "2) # [N, C, 1] # cam_mask = tf.expand_dims(cam_mask, 2)", "networks. DropBlock is a form of structured dropout, where units", "dtype=tf.float32) if dropblock_size == width: block_pattern = tf.reduce_min(block_pattern, axis=[2, 3,", "else tf.reshape(net, [N, C, height*width]) # # cam_mean = 1", "width**2 / dropblock_size**2 / (width - dropblock_size + 1)**2 #", "a feature map are dropped together. DropBlock works better than", "of blocks to be dropped by DropBlock. 
data_format: `str` either", "# # grads = tf.gradients(cost, net)[0] # if not gap_w", "else: N, C, height, width = net.get_shape().as_list() N = tf.shape(net)[0]", "= left_or_top if dropblock_size % 2 == 1 else dropblock_size-left_or_top-1", "C, 1] net = tf.reshape(net, [N, height*width, C]) if data_format", "tensor with width!=height is not supported.') if G == None:", "2)) valid_block_center = tf.expand_dims(valid_block_center, 0) # for depth valid_block_center =", "# cam = tf.pad(cam, [[0, 0], [0, 0], [left_or_top, right_or_bot],", "ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NHWC' if data_format ==", "width!=height is not supported.') if G == None: G =", "/ tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) net = tf.reshape(net, [N,", "None if not gap_w is None: gap_w = tf.convert_to_tensor(gap_w, tf.float32)", "tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones, net.dtype) * tf.cast(", "- dropblock_size + 1)**2 # Forces the block to be", "[N, 1, 1] spt_mask = (cam < topk) spt_mask =", "net: [N, C, H, W] gap_w : [gap_C, num_of_class] '''", "# cam = tf.reshape(cam, [N, height*width, 1]) if data_format ==", "dropped by DropBlock. data_format: `str` either \"channels_first\" for `[batch, channels,", "1], padding='VALID', data_format='NCHW') # left_or_top = (dropblock_size-1) // 2 #", "num] gap_w = tf.reshape(gap_w, [C, gap_C//C, num]) gap_w = tf.reduce_mean(gap_w,", "dtype=tf.float32) if dropblock_size == width: block_pattern = tf.reduce_min( block_pattern, axis=[1,", "# net = tf.reshape(net, [N, height*width, C]) if data_format ==", "no DropBlock. 
dropblock_size: `int` size of blocks to be dropped", "'channels_last' else tf.reshape(net, [N, C, height*width]) cam = tf.matmul(weights, net,", "chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N, height, width, 1]) if data_format ==", "- tf.log(-tf.log(U + eps) + eps) # cam_mask = tf.nn.softmax(y", "reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME',", "tf.expand_dims(cam_mask, 2) # [N, C, 1] # cam_mask = tf.expand_dims(cam_mask,", "1) # [N, C, 1] chan_mask = (label < topk)", "topk = tf.expand_dims(topk, 1) # [N, C, 1] # cam_mask", "[N, height, width, 1]) if data_format == 'channels_last' else tf.reshape(cam,", "tf.matmul(weights, net, transpose_a=True) # [N, 1, width*height] # spt_mask =", "width, height], dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) +", "GN \"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training) if not", "channels, height, width]` or \"channels_last for `[batch, height, width, channels]`.", "DropBlock works better than dropout on convolutional layers due to", "'channels_last' else tf.reshape(cam, [N, 1, height*width]) k = tf.cast(height*width/dropblock_size**2, tf.int32)", "strides=[1, 1, 1, 1], padding='SAME', data_format='NCHW') block_pattern = tf.expand_dims(block_pattern, 2)", "1, dropblock_size, dropblock_size] block_pattern = -tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1, 1,", "1, 1] # return cam_mask ctx = get_current_tower_context() is_training =", "https://arxiv.org/pdf/1810.12890.pdf for details. Args: net: `Tensor` input tensor. 
is_training: `bool`", "cam_mask = (cam_mask < topk) # # cam_mask = tf.cast(tf.equal(cam_mask,", "weights = tf.expand_dims(weights, 2) # [N, C, 1] # net", "tf.reshape(net, [N, height*width, C]) if data_format == 'channels_last' else tf.reshape(net,", "DropBlock is a form of structured dropout, where units in", "maxval=1) # y = logits - tf.log(-tf.log(U + eps) +", "by DropBlock. data_format: `str` either \"channels_first\" for `[batch, channels, height,", "k] topk = tf.gather(topk, indices=[k-1], axis=-1) # [N, 1, 1]", "== 'channels_last' else tf.reshape(cam, [N, 1, height, width]) # cam", "dropout def dropblock(net, keep_prob, dropblock_size, gap_w=None, label=None, G=None, CG=None, data_format='channels_first'):", "the gradients # if data_format == 'channels_last': # N, height,", "import get_current_tower_context from tensorpack.models import GlobalAvgPooling, FullyConnected import tensorflow as", "width]) # else: # cam = 0. # return cam", "tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast(", "data_format == 'channels_last': ksize = [1, dropblock_size, dropblock_size, 1] else:", "neural networks. 
DropBlock is a form of structured dropout, where", "= tf.expand_dims(topk, 1) # [N, C, 1] # cam_mask =", "cam_mask) block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern", "baseline; 2: group dropout; 3: group soft-dropout; 4: Uout group", "for depth valid_block_center = tf.expand_dims(valid_block_center, 0) # for batch valid_block_center", "width and height of the input tensor are not equal.", "width: block_pattern = tf.reduce_min( block_pattern, axis=[1, 2] if data_format ==", "tf.reshape(net, [N, G, CG, height, width]) dropblock_size = min(dropblock_size, width)", "- seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern = tf.logical_or(block_pattern,", "CG=None, data_format='channels_first'): \"\"\"DropBlock: a regularization method for convolutional neural networks.", "tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern = tf.reduce_min(block_pattern, axis=[2,", "net.get_shape().as_list() else: _, _, width, height = net.get_shape().as_list() if width", "dropblock_size % 2 == 1 else dropblock_size-left_or_top-1 # cam =", "regularization method for convolutional neural networks. 
DropBlock is a form", "dropblock_size, dropblock_size], strides=[1, 1, 1, 1], padding='VALID', data_format='NCHW') # left_or_top", "right_or_bot], [left_or_top, right_or_bot]]) # cam = tf.reshape(cam, [N, height*width, 1])", "== 'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(", "[N, C, 1] net = tf.reshape(net, [N, height*width, C]) if", "= tf.reshape(cam, [N, height, width, C]) if data_format == 'channels_last'", "[left_or_top, right_or_bot], [left_or_top, right_or_bot]]) # cam = tf.reshape(cam, [N, height*width,", "topk, _ = tf.math.top_k(cam, k=k) # [N, 1, k] topk", "net = tf.reshape(net, [N, height*width, C]) if data_format == 'channels_last'", "is not supported.') N = tf.shape(net)[0] dropblock_size = min(dropblock_size, width)", "[N, 1, height, width]) # cam = tf.nn.avg_pool(cam, ksize=[1, 1,", "See https://arxiv.org/pdf/1810.12890.pdf for details. Args: net: `Tensor` input tensor. is_training:", "1 + tf.matmul(net, weights, transpose_a=True) # [N, width*height, 1] #", "absolute_import from __future__ import division from __future__ import print_function import", "# spatial weights = tf.expand_dims(label, 2) # [N, C, 1]", "grad in zip(var_list, grads)] # # grads = tf.gradients(cost, net)[0]", "= tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N, 1] # topk =", "G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock: a regularization method for convolutional neural", "2), h_i < width - (dropblock_size - 1) // 2))", "== None: CG = C // G net = tf.reshape(net,", "1, height, width]) # cam = tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size,", "1) // 2), tf.logical_and(h_i >= int(dropblock_size // 2), h_i <", "tf.expand_dims(cam_mask, 2) # [N, C, 1, 1] # return cam_mask", "def CamDrop(net, keep_prob, dropblock_size, flag=None, label=None, G=None, CG=None, data_format='channels_first'): '''CamDrop'''", "cam = tf.pad(cam, [[0, 0], [0, 0], [left_or_top, right_or_bot], 
[left_or_top,", "cam_mean = 1 + tf.matmul(net, weights, transpose_a=True) # [N, width*height,", "== 1 else dropblock_size-left_or_top-1 # cam = tf.pad(cam, [[0, 0],", "= tf.reshape(tf.nn.sigmoid(cam), [N, height, width, 1]) if data_format == 'channels_last'", "# [N, C, 1] # net = tf.reshape(net, [N, height*width,", "if flag > 0 else None if not gap_w is", "height*width]) k = tf.cast(height*width/dropblock_size**2, tf.int32) topk, _ = tf.math.top_k(cam, k=k)", "= _compute_gradients(cost, [net])[0] # norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True))", "_compute_gradients(tensor, var_list): # grads = tf.gradients(tensor, var_list) # return [grad", "= tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag > 0 else None if", "= tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) +", "net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) net = tf.reshape(net,", "raise ValueError('Input tensor with width!=height is not supported.') dropblock_size =", "dropblock_size == width: block_pattern = tf.reduce_min(block_pattern, axis=[2, 3, 4], keepdims=True)", "N = tf.shape(net)[0] dropblock_size = min(dropblock_size, width) # seed_drop_rate is", "width, channels]`. 
Returns: A version of input tensor with DropBlock", "(1 - seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern =", "tau) # topk, _ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N,", "width - (dropblock_size - 1) // 2), tf.logical_and(h_i >= int(dropblock_size", "= tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net = net /", "data_format='channels_first'): # # Conv layer tensor [?,2048,10,10] # def _compute_gradients(tensor,", "tf.cast( tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones, net.dtype) *", "= tf.logical_or(block_pattern, cam_mask) block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size ==", "[N, C, height, width]) return net def CamDrop(net, keep_prob, dropblock_size,", "dropblock_size-left_or_top-1 # cam = tf.pad(cam, [[0, 0], [0, 0], [left_or_top,", "tensor [?,2048,10,10] # def _compute_gradients(tensor, var_list): # grads = tf.gradients(tensor,", "dropblock2(net, keep_prob, dropblock_size, G=None, CG=None, data_format='channels_first'): \"\"\" mimic GN \"\"\"", "- (dropblock_size - 1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0)", "is_training = bool(ctx.is_training) if not is_training or keep_prob is None:", "# topk = tf.expand_dims(topk, 1) # [N, C, 1] #", "block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1,", "== 'channels_last' else tf.reshape(net, [N, C, height, width]) return net", "tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1, 1, 1],", "= tf.expand_dims(block_pattern, 2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32)", "2) # [N, C, 1] chan_mask = tf.expand_dims(chan_mask, 2) #", "= tf.expand_dims(cam_mask, 2) # [N, C, 1, 1] # return", "dropout, where units 
in a contiguous region of a feature", "logits # U = tf.random_uniform(tf.shape(logits), minval=0, maxval=1) # y =", "1] # net = tf.reshape(net, [N, height*width, C]) if data_format", "data_format == 'channels_last': # N, height, width, C = net.get_shape().as_list()", "net.get_shape().as_list() # N = tf.shape(net)[0] # grads = _compute_gradients(cost, [net])[0]", "height of the input tensor are not equal. \"\"\" ctx", "a regularization method for convolutional neural networks. DropBlock is a", "data_format == 'channels_last' else tf.reshape(net, [N, C, height, width]) return", "else: if data_format == 'channels_last': ksize = [1, dropblock_size, dropblock_size,", "2) # [N, C, 1] net = tf.reshape(net, [N, height*width,", "data_format == 'channels_last' else tf.reshape(spt_mask, [N, 1, height, width]) #", "for var, grad in zip(var_list, grads)] # # grads =", "[N, C, 1] chan_mask = (label < topk) chan_mask =", "else tf.reshape(cam, [N, C, height, width]) # else: # cam", "width, 1]) if data_format == 'channels_last' else tf.reshape(spt_mask, [N, 1,", "0 else None if not gap_w is None: gap_w =", "def _gumbel_softmax(logits, tau, shape, seed_drop_rate, eps=1e-20): # if logits ==", "feature map. 
w_i, h_i = tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center = tf.logical_and(", "topk) # # cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)), tf.float32)", "{}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': N, height, width, C", "tf.zeros_like(var) # for var, grad in zip(var_list, grads)] # #", "+ eps) + eps) # cam_mask = tf.nn.softmax(y / tau)", "= cam_mean*cam_chan # # Passing through ReLU # cam =", "= tf.random_uniform([N, G, 1, width, height], dtype=tf.float32) block_pattern = (1", "* tf.cast(block_pattern, net.dtype) net = tf.reshape(net, [N, height, width, C])", "cam_mask = tf.expand_dims(cam_mask, 2) # [N, C, 1] # cam_mask", "net = net / tf.cast(percent_ones, net.dtype) * tf.cast( block_pattern, net.dtype)", "[N, 1, width*height] # spt_mask = tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True))", "1] cam_mask = tf.logical_or(spt_mask, chan_mask) # chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C,", "# -*- coding: utf-8 -*- # File: dropblock.py from __future__", "[N, C, height*width]) # # cam_mean = 1 + tf.matmul(net,", "data_format == 'channels_last' else tf.reshape(cam, [N, C, height, width]) #", "== 'channels_last' else tf.reshape(cam, [N*C, height*width]) # chan_mask = tf.reshape(tf.nn.sigmoid(cam),", "tensorpack.tfutils.tower import get_current_tower_context from tensorpack.models import GlobalAvgPooling, FullyConnected import tensorflow", "tf.shape(net)[0] if width != height: raise ValueError('Input tensor with width!=height", "1)**2) seed_drop_rate = (1.0 - keep_prob) * width**2 / dropblock_size**2", "valid_block_center = tf.expand_dims(valid_block_center, 0) # for batch valid_block_center = tf.expand_dims(valid_block_center,", "with width!=height is not supported.') N = tf.shape(net)[0] dropblock_size =", "1, width*height] # cam_chan = tf.maximum(tf.multiply(net, weights), 0) # [N,", "reduction_indices=[2,3]) # [N, C] # weights = 
tf.expand_dims(weights, 2) #", "else tf.reshape(net, [N, C, height*width]) cam = tf.matmul(weights, net, transpose_a=True)", "= [1, dropblock_size, dropblock_size, 1] else: ksize = [1, 1,", "= tf.expand_dims(valid_block_center, -1 if data_format == 'channels_last' else 0) randnoise", "gap_w = tf.convert_to_tensor(gap_w, tf.float32) gap_C, num = tf.squeeze(gap_w, 0).get_shape().as_list() #", "// 2), h_i < width - (dropblock_size - 1) //", "cam = cam_mean*cam_chan # # Passing through ReLU # cam", "0) # for batch valid_block_center = tf.expand_dims(valid_block_center, 0) # for", "tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center = tf.logical_and( tf.logical_and(w_i >= int(dropblock_size // 2),", "DropBlcok. seed_drop_rate = (1.0 - keep_prob) * width**2 / dropblock_size**2", "layers are spatially correlated. See https://arxiv.org/pdf/1810.12890.pdf for details. Args: net:", "C]) if data_format == 'channels_last' else tf.reshape(net, [N, C, height*width])", "(1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1 - seed_drop_rate), dtype=tf.float32) +", "A version of input tensor with DropBlock applied. Raises: if", "block_pattern = tf.expand_dims(block_pattern, 2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern),", "k=k) # [N, 1, k] topk = tf.gather(topk, indices=[k-1], axis=-1)", "# grads = _compute_gradients(cost, [net])[0] # norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads),", "else: # cam = 0. 
# return cam # def", "'channels_last': _, width, height, C = net.get_shape().as_list() else: _, C,", "dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1 -", "* tf.cast( block_pattern, net.dtype) return net def dropblock2(net, keep_prob, dropblock_size,", "1, height, width]) else: cam_mask = False return cam_mask #", "C, height, width]) return net def CamDrop(net, keep_prob, dropblock_size, flag=None,", "- tf.cast(valid_block_center, dtype=tf.float32) + tf.cast( (1 - seed_drop_rate), dtype=tf.float32) +", "valid_block_center = tf.expand_dims(valid_block_center, -1 if data_format == 'channels_last' else 0)", "tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5)) # weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3])", "tf.reshape(cam, [N, height, width, C]) if data_format == 'channels_last' else", "raise ValueError('Input tensor with width!=height is not supported.') N =", "valid_block_center = tf.logical_and( tf.logical_and(w_i >= int(dropblock_size // 2), w_i <", "1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims(valid_block_center,", "'channels_last': _, width, height, _ = net.get_shape().as_list() else: _, _,", "[N, height, width, C]) if data_format == 'channels_last' else tf.reshape(cam,", "if data_format == 'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) # chan_mask", "0) # [N, 1, width*height] # cam_chan = tf.maximum(tf.multiply(net, weights),", "ValueError('Input tensor with width!=height is not supported.') N = tf.shape(net)[0]", "== None: G = C // CG if CG ==", "{}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': _, width, height, _", "import division from __future__ import print_function import re import six", "* G**2 / (C * dropblock_size**2) / (C * (width", "tf.expand_dims(valid_block_center, 0) # for batch valid_block_center = 
tf.expand_dims(valid_block_center, 0) #", "return cam_mask # def _get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'): # #", "ValueError('Input tensor with width!=height is not supported.') if G ==", "width**2 * G**2 / (C * dropblock_size**2) / (C *", "3, 4], keepdims=True) else: ksize = [1, 1, dropblock_size, dropblock_size]", "var, grad in zip(var_list, grads)] # # grads = tf.gradients(cost,", "import tensorflow as tf __all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1:", "block_pattern = tf.logical_or(block_pattern, cam_mask) block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size", "are dropped together. DropBlock works better than dropout on convolutional", "is a form of structured dropout, where units in a", "num]) gap_w = tf.reduce_mean(gap_w, reduction_indices=[1]) # [C, num] label =", "# Forces the block to be inside the feature map.", "of the input tensor are not equal. \"\"\" ctx =", "net / tf.cast(percent_ones, net.dtype) * tf.cast( block_pattern, net.dtype) return net", "k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N, 1] # topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1],", "block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1 - seed_drop_rate),", "not gap_w is None: gap_w = tf.convert_to_tensor(gap_w, tf.float32) gap_C, num", "_, _, width, height = net.get_shape().as_list() if width != height:", "tf.shape(net)[0] gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag > 0 else", "/ tf.cast(tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones, net.dtype) *", "# channel k = tf.cast(C/8, tf.int32) topk, _ = tf.math.top_k(label,", "# [N, 1] topk = tf.expand_dims(topk, 1) # [N, C,", "is_training or keep_prob is None: return net tf.logging.info('Applying DropBlock: dropblock_size", "[1, 1, dropblock_size, dropblock_size] block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern =", "1], 
padding='SAME', data_format='NCHW') block_pattern = tf.expand_dims(block_pattern, 2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)),", "dtype=tf.float32) + tf.cast( (1 - seed_drop_rate), dtype=tf.float32) + randnoise) >=", "dropblock_size, dropblock_size] block_pattern = -tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1, 1, 1,", "dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast( (1", "= ['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1: paper baseline; 2: group dropout;", "with width!=height is not supported.') dropblock_size = min(dropblock_size, width) #", "= tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N, C] # weights = tf.expand_dims(weights,", "/ ( width - dropblock_size + 1)**2 # Forces the", "1] # cam_mean = tf.maximum(tf.matmul(weights, net, transpose_a=True), 0) # [N,", "# [N, k] topk = tf.gather(topk, indices=k, axis=1) # [N,", "dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': N,", "gap_w : [gap_C, num_of_class] ''' if data_format == 'channels_last': N,", "_ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N, 1] # topk", "Raises: if width and height of the input tensor are", "width*height] # spt_mask = tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) # cam", "1] # topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) # topk", "else: # N, C, height, width = net.get_shape().as_list() # N", "# seed_drop_rate is the gamma parameter of DropBlcok. 
# seed_drop_rate", "= tf.reduce_mean(gap_w, reduction_indices=[1]) # [C, num] label = tf.gather(tf.transpose(gap_w), label)", "C, 1, 1] cam_mask = tf.logical_or(spt_mask, chan_mask) # chan_mask =", "False return cam_mask # def _get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'): #", "k] topk = tf.gather(topk, indices=k, axis=1) # [N, 1] topk", "width*height] # cam = cam_mean*cam_chan # # Passing through ReLU", "# cam_mask = tf.expand_dims(cam_mask, 2) # [N, C, 1, 1]", "* width**2 / dropblock_size**2 / ( width - dropblock_size +", "# [N, C, 1] net = tf.reshape(net, [N, height*width, C])", "height, C = net.get_shape().as_list() else: _, C, width, height =", "net, transpose_a=True) # [N, 1, width*height] # spt_mask = tf.not_equal(cam,", "U = tf.random_uniform(tf.shape(logits), minval=0, maxval=1) # y = logits -", "tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims( valid_block_center, -1 if data_format ==", "convolutional layers are spatially correlated. See https://arxiv.org/pdf/1810.12890.pdf for details. Args:", "1]) if data_format == 'channels_last' else tf.reshape(spt_mask, [N, 1, height,", "gap_w is None: # # Normalizing the gradients # if", "else 0) randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1 -", "# left_or_top = (dropblock_size-1) // 2 # right_or_bot = left_or_top", "[net])[0] # norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5))", "if data_format == 'channels_last' else tf.reshape(cam, [N, 1, height*width]) k", "/ (width - dropblock_size + 1)**2 # Forces the block", "is training. 
keep_prob: `float` or `Tensor` keep_prob parameter of DropBlock.", "# cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)), tf.float32) # cam_mask", "[grad if grad is not None else tf.zeros_like(var) # for", "in zip(var_list, grads)] # # grads = tf.gradients(cost, net)[0] #", "N, height, width, C = net.get_shape().as_list() # else: # N,", "from tensorpack.tfutils.tower import get_current_tower_context from tensorpack.models import GlobalAvgPooling, FullyConnected import", "-tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NCHW') block_pattern =", "group dropout; 3: group soft-dropout; 4: Uout group dropout def", "tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) # topk = tf.expand_dims(topk, 1) #", "= tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center = tf.logical_and( tf.logical_and(w_i >= int(dropblock_size //", "= tf.random_uniform(tf.shape(logits), minval=0, maxval=1) # y = logits - tf.log(-tf.log(U", "tf.cast((1 - seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern =", "width - (dropblock_size - 1) // 2)) valid_block_center = tf.expand_dims(valid_block_center,", "_get_cam(net, label, flag, dropblock_size, data_format) # Forces the block to", "2] if data_format == 'channels_last' else [2, 3], keepdims=True) else:", "width = net.get_shape().as_list() # N = tf.shape(net)[0] # grads =", "is the gamma parameter of DropBlcok. # seed_drop_rate = (1.0", "if dropblock_size % 2 == 1 else dropblock_size-left_or_top-1 # cam", "keepdims=True) # cam = tf.reshape(cam, [N, height, width, C]) if", "[N, height, width, 1]) if data_format == 'channels_last' else tf.reshape(spt_mask,", "height*width]) cam = tf.matmul(weights, net, transpose_a=True) # [N, 1, width*height]", "net tf.logging.info('Applying DropBlock: dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape)) if data_format", "DropBlock. 
dropblock_size: `int` size of blocks to be dropped by", "'channels_last': ksize = [1, dropblock_size, dropblock_size, 1] else: ksize =", "CG if CG == None: CG = C // G", "re import six # from tensorpack.tfutils.compat import tfv1 as tf", "[N, 1, height*width]) k = tf.cast(height*width/dropblock_size**2, tf.int32) topk, _ =", "0) # [N, C, width*height] # cam = cam_mean*cam_chan #", "width, 1]) if data_format == 'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N, 1,", "block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern =", "batch valid_block_center = tf.expand_dims(valid_block_center, 0) # for channel randnoise =", "w_i < width - (dropblock_size - 1) // 2), tf.logical_and(h_i", "valid_block_center = tf.expand_dims( valid_block_center, -1 if data_format == 'channels_last' else", "= tf.cast(C/8, tf.int32) topk, _ = tf.math.top_k(label, k=k+1) # [N,", "1, 1] spt_mask = (cam < topk) spt_mask = tf.reshape(spt_mask,", "* width**2 / dropblock_size**2 / (width - dropblock_size + 1)**2", "height, width, C]) if data_format == 'channels_last' else tf.reshape(net, [N,", "[N, C, H, W] gap_w : [gap_C, num_of_class] ''' if", "# def _compute_gradients(tensor, var_list): # grads = tf.gradients(tensor, var_list) #", "# topk, _ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N, 1]", "// CG if CG == None: CG = C //", "1] spt_mask = (cam < topk) spt_mask = tf.reshape(spt_mask, [N,", "(cam_mask < topk) # # cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1],", "topk = tf.expand_dims(topk, 1) # [N, C, 1] chan_mask =", "None: gap_w = tf.convert_to_tensor(gap_w, tf.float32) gap_C, num = tf.squeeze(gap_w, 0).get_shape().as_list()", "height, width]) # channel k = tf.cast(C/8, tf.int32) topk, _", "tf.reshape(cam, [N*C, height*width]) # chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N, height, width,", "// 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = 
tf.expand_dims(valid_block_center, -1", "tf.reshape(spt_mask, [N, height, width, 1]) if data_format == 'channels_last' else", "1] net = tf.reshape(net, [N, height*width, C]) if data_format ==", "['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1: paper baseline; 2: group dropout; 3:", "data_format == 'channels_last' else tf.reshape(net, [N, C, height*width]) # #", "== 'channels_last' else tf.reshape(net, [N, C, height*width]) # # cam_mean", "height, width, 1]) if data_format == 'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N,", "dropblock.py from __future__ import absolute_import from __future__ import division from", "together. DropBlock works better than dropout on convolutional layers due", "Forces the block to be inside the feature map. w_i,", "= tf.maximum(tf.multiply(net, weights), 0) # [N, C, width*height] # cam", "if logits == False: # return logits # U =", "1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims(", "== width: block_pattern = tf.reduce_min( block_pattern, axis=[1, 2] if data_format", "= tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C, num] gap_w = tf.reshape(gap_w, [C,", "keep_prob) * width**2 * G**2 / (C * dropblock_size**2) /", "_, C, width, height = net.get_shape().as_list() if width != height:", "= tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1, 1,", "# [gap_C, num] gap_w = tf.reshape(gap_w, [C, gap_C//C, num]) gap_w", "cam = tf.matmul(weights, net, transpose_a=True) # [N, 1, width*height] #", "-tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NHWC' if", "tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)), tf.float32) # cam_mask = tf.expand_dims(cam_mask, 2) #", "keep_prob is None: return net tf.logging.info('Applying DropBlock: dropblock_size {}, net.shape", "Conv layer tensor [?,2048,10,10] # def _compute_gradients(tensor, 
var_list): # grads", "works better than dropout on convolutional layers due to the", "chan_mask = tf.reshape(cam, [N*C, height*width]) if data_format == 'channels_last' else", "raise ValueError('Input tensor with width!=height is not supported.') if G", "# cam = tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size, dropblock_size], strides=[1, 1,", "(1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast( (1 - seed_drop_rate), dtype=tf.float32)", "# spt_mask = tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) # cam =", "not supported.') dropblock_size = min(dropblock_size, width) # seed_drop_rate is the", "= [1, 1, dropblock_size, dropblock_size] block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern", "tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N, C] # weights = tf.expand_dims(weights, 2)", "= tf.gradients(cost, net)[0] # if not gap_w is None: #", "0], [0, 0], [left_or_top, right_or_bot], [left_or_top, right_or_bot]]) # cam =", "model code from tensorpack.tfutils.tower import get_current_tower_context from tensorpack.models import GlobalAvgPooling,", "[2, 3], keepdims=True) else: if data_format == 'channels_last': ksize =", "/ tf.cast(percent_ones, net.dtype) * tf.cast( block_pattern, net.dtype) return net def", "tf.expand_dims(topk, 1) # [N, C, 1] chan_mask = (label <", "= logits - tf.log(-tf.log(U + eps) + eps) # cam_mask", "tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)), tf.float32) # cam_mask = tf.expand_dims(cam_mask, 2)", "dropout; 3: group soft-dropout; 4: Uout group dropout def dropblock(net,", "grads = _compute_gradients(cost, [net])[0] # norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3],", "tf.reshape(gap_w, [C, gap_C//C, num]) gap_w = tf.reduce_mean(gap_w, reduction_indices=[1]) # [C,", "dropblock_size + 1)**2 cam_mask = _get_cam(net, label, flag, dropblock_size, data_format)", "(width - dropblock_size 
+ 1)**2) seed_drop_rate = (1.0 - keep_prob)", "1 else dropblock_size-left_or_top-1 # cam = tf.pad(cam, [[0, 0], [0,", "height, width]` or \"channels_last for `[batch, height, width, channels]`. Returns:", "be dropped by DropBlock. data_format: `str` either \"channels_first\" for `[batch,", "# for channel randnoise = tf.random_uniform([N, G, 1, width, height],", "# [N, C, 1] # cam_mask = (cam_mask < topk)", ">= int(dropblock_size // 2), w_i < width - (dropblock_size -", "= tf.expand_dims(chan_mask, 2) # [N, C, 1, 1] cam_mask =", "# U = tf.random_uniform(tf.shape(logits), minval=0, maxval=1) # y = logits", "width) # seed_drop_rate is the gamma parameter of DropBlcok. seed_drop_rate", "C // CG if CG == None: CG = C", "num] label = tf.gather(tf.transpose(gap_w), label) # [N, C] # spatial", "tf.reshape(cam, [N, C, height, width]) # else: # cam =", "'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N, 1, height, width]) else: cam_mask =", "the gamma parameter of DropBlcok. seed_drop_rate = (1.0 - keep_prob)", "cam = tf.reshape(cam, [N, height, width, C]) if data_format ==", "eps=1e-20): # if logits == False: # return logits #", "= tf.reshape(net, [N, height*width, C]) if data_format == 'channels_last' else", "'channels_last' else tf.reshape(cam, [N, 1, height, width]) # cam =", "2), w_i < width - (dropblock_size - 1) // 2),", "for convolutional neural networks. 
DropBlock is a form of structured", "valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims(valid_block_center, -1 if data_format", "height, width]) return net def CamDrop(net, keep_prob, dropblock_size, flag=None, label=None,", "'channels_last' else [2, 3], keepdims=True) else: if data_format == 'channels_last':", "__future__ import division from __future__ import print_function import re import", "data_format == 'channels_last' else tf.reshape(cam, [N*C, height*width]) # chan_mask =", "< width - (dropblock_size - 1) // 2), tf.logical_and(h_i >=", "# def _get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'): # # Conv layer", "tf.reshape(net, [N, C, height*width]) cam = tf.matmul(weights, net, transpose_a=True) #", "tf.gradients(tensor, var_list) # return [grad if grad is not None", "paper baseline; 2: group dropout; 3: group soft-dropout; 4: Uout", "= tf.reshape(net, [N, height, width, C]) if data_format == 'channels_last'", "[N*C, height*width]) if data_format == 'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C, height*width])", "[N, C, 1, 1] # return cam_mask ctx = get_current_tower_context()", "C, height, width = net.get_shape().as_list() # N = tf.shape(net)[0] #", "(dropblock_size-1) // 2 # right_or_bot = left_or_top if dropblock_size %", "1] chan_mask = (label < topk) chan_mask = tf.expand_dims(chan_mask, 2)", "data_format='channels_first'): '''CamDrop''' def _get_cam(net, label, flag, dropblock_size, data_format='channels_first'): ''' net:", "from tensorpack.models import GlobalAvgPooling, FullyConnected import tensorflow as tf __all__", "units in convolutional layers are spatially correlated. 
See https://arxiv.org/pdf/1810.12890.pdf for", "(dropblock_size - 1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center", "net.shape)) if data_format == 'channels_last': N, height, width, C =", "else None if not gap_w is None: gap_w = tf.convert_to_tensor(gap_w,", "[1, 1, dropblock_size, dropblock_size] block_pattern = -tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1,", "def dropblock2(net, keep_prob, dropblock_size, G=None, CG=None, data_format='channels_first'): \"\"\" mimic GN", "tfv1 as tf # this should be avoided first in", "N, C, height, width = net.get_shape().as_list() N = tf.shape(net)[0] gap_w", "'linear/W') if flag > 0 else None if not gap_w", "1, 1], padding='SAME', data_format='NCHW') block_pattern = tf.expand_dims(block_pattern, 2) percent_ones =", "C] # spatial weights = tf.expand_dims(label, 2) # [N, C,", "[N, height, width, C]) if data_format == 'channels_last' else tf.reshape(net,", "var_list): # grads = tf.gradients(tensor, var_list) # return [grad if", "< topk) chan_mask = tf.expand_dims(chan_mask, 2) # [N, C, 1]", "dropblock_size + 1)**2 # Forces the block to be inside", "y = logits - tf.log(-tf.log(U + eps) + eps) #", "# from tensorpack.tfutils.compat import tfv1 as tf # this should", "if grad is not None else tf.zeros_like(var) # for var,", "cam = tf.reshape(cam, [N, height, width, 1]) if data_format ==", "/ (width - dropblock_size + 1)**2 cam_mask = _get_cam(net, label,", "layer tensor [?,2048,10,10] # def _compute_gradients(tensor, var_list): # grads =", "== 'channels_last': _, width, height, C = net.get_shape().as_list() else: _,", "else: cam_mask = False return cam_mask # def _get_gradcam(net, cost=None,", "form of structured dropout, where units in a contiguous region", "tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N, 1] # topk = tf.gather(topk,", "2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims( valid_block_center, 
-1", "= net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) return net", "is not supported.') if G == None: G = C", "tf # this should be avoided first in model code", "C, height, width]) # else: # cam = 0. #", "or \"channels_last for `[batch, height, width, channels]`. Returns: A version", "None: return net tf.logging.info('Applying DropBlock: dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape))", "= cam / tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) # cam = tf.reshape(cam,", "height*width, C]) if data_format == 'channels_last' else tf.reshape(net, [N, C,", "== 'channels_last': # N, height, width, C = net.get_shape().as_list() #", "return cam # def _gumbel_softmax(logits, tau, shape, seed_drop_rate, eps=1e-20): #", "G, 1, width, height], dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center,", "= tf.reshape(cam, [N, height*width, 1]) if data_format == 'channels_last' else", "data_format == 'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N, 1, height, width]) else:", "+ tf.cast((1 - seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern", "if data_format == 'channels_last': ksize = [1, dropblock_size, dropblock_size, 1]", "block to be inside the feature map. 
w_i, h_i =", "num = tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C, num] gap_w = tf.reshape(gap_w,", "[gap_C, num] gap_w = tf.reshape(gap_w, [C, gap_C//C, num]) gap_w =", "H, W] gap_w : [gap_C, num_of_class] ''' if data_format ==", "/ (C * dropblock_size**2) / (C * (width - dropblock_size", "bool(ctx.is_training) if not is_training or keep_prob is None: return net", "== 'channels_last': _, width, height, _ = net.get_shape().as_list() else: _,", "width, C = net.get_shape().as_list() else: N, C, height, width =", "def _get_cam(net, label, flag, dropblock_size, data_format='channels_first'): ''' net: [N, C,", "height, width = net.get_shape().as_list() # N = tf.shape(net)[0] # grads", "= tf.expand_dims(valid_block_center, 0) # for channel randnoise = tf.random_uniform([N, G,", "flag, dropblock_size, data_format='channels_first'): ''' net: [N, C, H, W] gap_w", "# cam_mean = 1 + tf.matmul(net, weights, transpose_a=True) # [N,", "reduction_indices=[1]) # [C, num] label = tf.gather(tf.transpose(gap_w), label) # [N,", "topk) spt_mask = tf.reshape(spt_mask, [N, height, width, 1]) if data_format", "# # Normalizing the gradients # if data_format == 'channels_last':", "keep_prob, dropblock_size, G=None, CG=None, data_format='channels_first'): \"\"\" mimic GN \"\"\" ctx", "tf.cast(height*width/dropblock_size**2, tf.int32) topk, _ = tf.math.top_k(cam, k=k) # [N, 1,", "dtype=tf.float32) + randnoise) >= 1 block_pattern = tf.logical_or(block_pattern, cam_mask) block_pattern", "for `[batch, channels, height, width]` or \"channels_last for `[batch, height,", "(label < topk) chan_mask = tf.expand_dims(chan_mask, 2) # [N, C,", "= net.get_shape().as_list() # else: # N, C, height, width =", "== 'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern),", "'channels_last': # N, height, width, C = net.get_shape().as_list() # else:", "height = net.get_shape().as_list() if width != height: raise ValueError('Input 
tensor", "zip(var_list, grads)] # # grads = tf.gradients(cost, net)[0] # if", "// 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims( valid_block_center,", "= net.get_shape().as_list() else: _, _, width, height = net.get_shape().as_list() if", "= (cam < topk) spt_mask = tf.reshape(spt_mask, [N, height, width,", "< width - (dropblock_size - 1) // 2)) valid_block_center =", "height*width]) # chan_mask = tf.reshape(cam, [N*C, height*width]) if data_format ==", "# y = logits - tf.log(-tf.log(U + eps) + eps)", "tf.logical_and(h_i >= int(dropblock_size // 2), h_i < width - (dropblock_size", "not None else tf.zeros_like(var) # for var, grad in zip(var_list,", "= tf.gather(tf.transpose(gap_w), label) # [N, C] # spatial weights =", "= tf.shape(net)[0] # grads = _compute_gradients(cost, [net])[0] # norm_grads =", "C = net.get_shape().as_list() # else: # N, C, height, width", "1, 1, 1], padding='SAME', data_format='NCHW') block_pattern = tf.expand_dims(block_pattern, 2) percent_ones", "block_pattern, axis=[1, 2] if data_format == 'channels_last' else [2, 3],", "None: CG = C // G net = tf.reshape(net, [N,", "randnoise) >= 1 block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size ==", "== 'channels_last' else [2, 3], keepdims=True) else: if data_format ==", "reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5)) # weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3]) #", "# seed_drop_rate is the gamma parameter of DropBlcok. seed_drop_rate =", "tau, shape, seed_drop_rate, eps=1e-20): # if logits == False: #", "if data_format == 'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32)", "# else: # cam = 0. 
# return cam #", "label=None, G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock: a regularization method for convolutional", "import re import six # from tensorpack.tfutils.compat import tfv1 as", "tf.expand_dims(chan_mask, 2) # [N, C, 1] chan_mask = tf.expand_dims(chan_mask, 2)", "= net / tf.cast(percent_ones, net.dtype) * tf.cast( block_pattern, net.dtype) return", "_ = tf.math.top_k(cam, k=k) # [N, 1, k] topk =", "[N*C, height*width]) # chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N, height, width, 1])", "= tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) # topk = tf.expand_dims(topk, 1)", "( width - dropblock_size + 1)**2 # Forces the block", "[N, C, width*height] # cam = cam_mean*cam_chan # # Passing", "2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims(valid_block_center, -1 if", "tf.reduce_min( block_pattern, axis=[1, 2] if data_format == 'channels_last' else [2,", "# N, C, height, width = net.get_shape().as_list() # N =", "as tf __all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1: paper baseline;", "from __future__ import absolute_import from __future__ import division from __future__", "or `Tensor` keep_prob parameter of DropBlock. 
\"None\" means no DropBlock.", "= (cam_mask < topk) # # cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask,", "(C * (width - dropblock_size + 1)**2) seed_drop_rate = (1.0", "# [N, 1, k] topk = tf.gather(topk, indices=[k-1], axis=-1) #", "else tf.reshape(spt_mask, [N, 1, height, width]) # channel k =", "get_current_tower_context() is_training = bool(ctx.is_training) if not is_training or keep_prob is", "G net = tf.reshape(net, [N, G, CG, height, width]) dropblock_size", "# [N, C] # weights = tf.expand_dims(weights, 2) # [N,", "net.shape)) if data_format == 'channels_last': _, width, height, C =", "tf.matmul(net, weights, transpose_a=True) # [N, width*height, 1] # cam_mean =", "= 1 + tf.matmul(net, weights, transpose_a=True) # [N, width*height, 1]", "get_current_tower_context from tensorpack.models import GlobalAvgPooling, FullyConnected import tensorflow as tf", "Uout group dropout def dropblock(net, keep_prob, dropblock_size, gap_w=None, label=None, G=None,", "`bool` for whether the model is training. 
keep_prob: `float` or", "chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if data_format == 'channels_last' else", "valid_block_center, -1 if data_format == 'channels_last' else 0) randnoise =", "- 1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) valid_block_center =", "block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast( (1 -", "height, width, C = net.get_shape().as_list() # else: # N, C,", "# if logits == False: # return logits # U", "# [N, width*height, 1] # cam_mean = tf.maximum(tf.matmul(weights, net, transpose_a=True),", "is None: # # Normalizing the gradients # if data_format", "'channels_last' else tf.reshape(cam, [N, C, height, width]) # else: #", "''' if data_format == 'channels_last': N, height, width, C =", "= tf.expand_dims(topk, 1) # [N, C, 1] chan_mask = (label", "\"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training) if not is_training", "data_format) # Forces the block to be inside the feature", "block_pattern, net.dtype) return net def dropblock2(net, keep_prob, dropblock_size, G=None, CG=None,", "= False return cam_mask # def _get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'):", "# File: dropblock.py from __future__ import absolute_import from __future__ import", "# if not gap_w is None: # # Normalizing the", "height: raise ValueError('Input tensor with width!=height is not supported.') dropblock_size", "first in model code from tensorpack.tfutils.tower import get_current_tower_context from tensorpack.models", "keep_prob, dropblock_size, flag=None, label=None, G=None, CG=None, data_format='channels_first'): '''CamDrop''' def _get_cam(net,", "either \"channels_first\" for `[batch, channels, height, width]` or \"channels_last for", "else tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) # chan_mask = tf.reshape(cam, [N*C, height*width])", "topk = tf.gather(topk, indices=[k-1], axis=-1) # [N, 1, 1] spt_mask", "with width!=height is not 
supported.') if G == None: G", "= net.get_shape().as_list() N = tf.shape(net)[0] gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if", "# Conv layer tensor [?,2048,10,10] # def _compute_gradients(tensor, var_list): #", "if data_format == 'channels_last': _, width, height, C = net.get_shape().as_list()", "[N, C, height, width]) # else: # cam = 0.", "flag, dropblock_size, data_format) # Forces the block to be inside", "is not None else tf.zeros_like(var) # for var, grad in", "contiguous region of a feature map are dropped together. DropBlock", "ksize = [1, dropblock_size, dropblock_size, 1] else: ksize = [1,", "if not is_training or keep_prob is None: return net tf.logging.info('Applying", "width) # seed_drop_rate is the gamma parameter of DropBlcok. #", "1)**2 # Forces the block to be inside the feature", "= tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern = tf.reduce_min(block_pattern,", "`Tensor` keep_prob parameter of DropBlock. \"None\" means no DropBlock. dropblock_size:", "net def dropblock2(net, keep_prob, dropblock_size, G=None, CG=None, data_format='channels_first'): \"\"\" mimic", "data_format == 'channels_last': _, width, height, _ = net.get_shape().as_list() else:", "C, height*width]) cam = tf.matmul(weights, net, transpose_a=True) # [N, 1,", "gamma parameter of DropBlcok. 
# seed_drop_rate = (1.0 - keep_prob)", "# 1: paper baseline; 2: group dropout; 3: group soft-dropout;", "cam = cam / tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) # cam =", "'channels_last' else 0) randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1", "= net.get_shape().as_list() N = tf.shape(net)[0] if width != height: raise", "-block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NHWC' if data_format", "label) # [N, C] # spatial weights = tf.expand_dims(label, 2)", "else tf.reshape(cam, [N, 1, height, width]) # cam = tf.nn.avg_pool(cam,", "`int` size of blocks to be dropped by DropBlock. data_format:", "= tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size, dropblock_size], strides=[1, 1, 1, 1],", "= net.get_shape().as_list() else: N, C, height, width = net.get_shape().as_list() N", "1, 1], padding='SAME', data_format='NHWC' if data_format == 'channels_last' else 'NCHW')", "tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast( tf.size(block_pattern), tf.float32) net = net /", "reduction_indices=[1], keepdims=True)), tf.float32) # cam_mask = tf.expand_dims(cam_mask, 2) # [N,", "net.dtype) return net def dropblock2(net, keep_prob, dropblock_size, G=None, CG=None, data_format='channels_first'):", "padding='VALID', data_format='NCHW') # left_or_top = (dropblock_size-1) // 2 # right_or_bot", ">= int(dropblock_size // 2), h_i < width - (dropblock_size -", "= (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1 - seed_drop_rate), dtype=tf.float32)", "seed_drop_rate = (1.0 - keep_prob) * width**2 / dropblock_size**2 /", "C // G net = tf.reshape(net, [N, G, CG, height,", "data_format == 'channels_last' else tf.reshape(net, [N, C, height*width]) cam =", "eps) + eps) # cam_mask = tf.nn.softmax(y / tau) #", "DropBlock. 
data_format: `str` either \"channels_first\" for `[batch, channels, height, width]`", "__future__ import print_function import re import six # from tensorpack.tfutils.compat", "tf.logical_or(block_pattern, cam_mask) block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width:", "keep_prob, dropblock_size, gap_w=None, label=None, G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock: a regularization", "1] # cam_mask = tf.expand_dims(cam_mask, 2) # [N, C, 1,", "_get_cam(net, label, flag, dropblock_size, data_format='channels_first'): ''' net: [N, C, H,", "a contiguous region of a feature map are dropped together.", "// 2), w_i < width - (dropblock_size - 1) //", "net.dtype) net = tf.reshape(net, [N, height, width, C]) if data_format", "keepdims=True) else: if data_format == 'channels_last': ksize = [1, dropblock_size,", "reduction_indices=[2], keepdims=True)) # cam = tf.reshape(cam, [N, height, width, 1])", "# [N, C, 1] chan_mask = tf.expand_dims(chan_mask, 2) # [N,", "C = net.get_shape().as_list() else: N, C, height, width = net.get_shape().as_list()", "height, width = net.get_shape().as_list() N = tf.shape(net)[0] if width !=", "3], keepdims=True) else: if data_format == 'channels_last': ksize = [1,", "== 'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) # chan_mask = tf.reshape(cam,", "(C * dropblock_size**2) / (C * (width - dropblock_size +", "data_format == 'channels_last' else 0) randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern", "width = net.get_shape().as_list() N = tf.shape(net)[0] if width != height:", "applied. 
Raises: if width and height of the input tensor", "= tf.expand_dims(valid_block_center, 0) # for batch valid_block_center = tf.expand_dims(valid_block_center, 0)", "height: raise ValueError('Input tensor with width!=height is not supported.') if", "height, width]) # cam = tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size, dropblock_size],", "height*width]) if data_format == 'channels_last' else tf.reshape(cam, [N*C, height*width]) #", "chan_mask = tf.expand_dims(chan_mask, 2) # [N, C, 1] chan_mask =", "None else tf.zeros_like(var) # for var, grad in zip(var_list, grads)]", "input tensor. is_training: `bool` for whether the model is training.", "if data_format == 'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N, 1, height, width])", "padding='SAME', data_format='NHWC' if data_format == 'channels_last' else 'NCHW') percent_ones =", "tf.reshape(spt_mask, [N, 1, height, width]) # channel k = tf.cast(C/8,", "= [1, 1, dropblock_size, dropblock_size] block_pattern = -tf.nn.max_pool( -block_pattern, ksize=ksize,", "tf.expand_dims(label, 2) # [N, C, 1] net = tf.reshape(net, [N,", "{}, net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': _, width,", "// 2 # right_or_bot = left_or_top if dropblock_size % 2", "to the fact that activation units in convolutional layers are", "CG == None: CG = C // G net =", "= tf.shape(net)[0] gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag > 0", "# this should be avoided first in model code from", "- keep_prob) * width**2 / dropblock_size**2 / ( width -", "- 1) // 2), tf.logical_and(h_i >= int(dropblock_size // 2), h_i", "+ tf.cast( (1 - seed_drop_rate), dtype=tf.float32) + randnoise) >= 1", "dtype=tf.float32) + randnoise) >= 1 block_pattern = tf.cast(block_pattern, dtype=tf.float32) if", "cam_mask ctx = get_current_tower_context() is_training = bool(ctx.is_training) if not is_training", "block_pattern = tf.reduce_min(block_pattern, axis=[2, 3, 4], keepdims=True) else: ksize =", 
"1)**2 cam_mask = _get_cam(net, label, flag, dropblock_size, data_format) # Forces", "+ 1)**2 # Forces the block to be inside the", "region of a feature map are dropped together. DropBlock works", "var_list) # return [grad if grad is not None else", "tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) # chan_mask = tf.reshape(cam, [N*C, height*width]) if", "channels]`. Returns: A version of input tensor with DropBlock applied.", "[N, height, width, 1]) if data_format == 'channels_last' else tf.reshape(tf.nn.sigmoid(cam),", "'channels_last' else tf.reshape(spt_mask, [N, 1, height, width]) # channel k", "N, C, height, width = net.get_shape().as_list() N = tf.shape(net)[0] if", "'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast( tf.size(block_pattern), tf.float32) net", "[C, num] label = tf.gather(tf.transpose(gap_w), label) # [N, C] #", "2), tf.logical_and(h_i >= int(dropblock_size // 2), h_i < width -", "# [N, C, 1, 1] # return cam_mask ctx =", "_, width, height, _ = net.get_shape().as_list() else: _, _, width,", "net.get_shape().as_list() N = tf.shape(net)[0] gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W') if flag", "tf.shape(net)[0] # grads = _compute_gradients(cost, [net])[0] # norm_grads = tf.divide(grads,", "= bool(ctx.is_training) if not is_training or keep_prob is None: return", "tf.gather(topk, indices=[k-1], axis=-1) # [N, 1, 1] spt_mask = (cam", "width, C]) if data_format == 'channels_last' else tf.reshape(net, [N, C,", "DropBlock: dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last':", "the fact that activation units in convolutional layers are spatially", "1, 1, 1], padding='SAME', data_format='NHWC' if data_format == 'channels_last' else", "G=None, CG=None, data_format='channels_first'): \"\"\" mimic GN \"\"\" ctx = get_current_tower_context()", "for batch valid_block_center = tf.expand_dims(valid_block_center, 0) # for channel randnoise", "units 
in a contiguous region of a feature map are", "tf.reshape(net, [N, height, width, C]) if data_format == 'channels_last' else", "== 'channels_last': N, height, width, C = net.get_shape().as_list() else: N,", "weights = tf.expand_dims(label, 2) # [N, C, 1] net =", "width = net.get_shape().as_list() N = tf.shape(net)[0] gap_w = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'linear/W')", "0) randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center,", "{}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': _, width, height, C", "right_or_bot = left_or_top if dropblock_size % 2 == 1 else", "import GlobalAvgPooling, FullyConnected import tensorflow as tf __all__ = ['dropblock',", "`[batch, height, width, channels]`. Returns: A version of input tensor", "/ tf.cast( tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones, net.dtype)", "data_format == 'channels_last' else tf.reshape(cam, [N, 1, height*width]) k =", "net def CamDrop(net, keep_prob, dropblock_size, flag=None, label=None, G=None, CG=None, data_format='channels_first'):", "not supported.') if G == None: G = C //", "net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': _, width, height,", "percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net = net", "ksize = [1, 1, dropblock_size, dropblock_size] block_pattern = -tf.nn.max_pool( -block_pattern,", "1, width, height], dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32)", "else [2, 3], keepdims=True) else: if data_format == 'channels_last': ksize", "2) # [N, C, 1, 1] cam_mask = tf.logical_or(spt_mask, chan_mask)", "{}, net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': N, height,", "dropblock_size**2 / ( width - dropblock_size + 1)**2 # Forces", "cam = tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size, dropblock_size], 
strides=[1, 1, 1,", "layers due to the fact that activation units in convolutional", "gap_w is None: gap_w = tf.convert_to_tensor(gap_w, tf.float32) gap_C, num =", "gradients # if data_format == 'channels_last': # N, height, width,", "strides=[1, 1, 1, 1], padding='SAME', data_format='NHWC' if data_format == 'channels_last'", "tf.logging.info('Applying DropBlock: dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape)) if data_format ==", "1 block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern", "C, 1] chan_mask = (label < topk) chan_mask = tf.expand_dims(chan_mask,", "the block to be inside the feature map. w_i, h_i", "tf.shape(net)[0] dropblock_size = min(dropblock_size, width) # seed_drop_rate is the gamma", "indices=[k-1], axis=-1) # [N, 1, 1] spt_mask = (cam <", "1, dropblock_size, dropblock_size], strides=[1, 1, 1, 1], padding='VALID', data_format='NCHW') #", "norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5)) # weights", "or keep_prob is None: return net tf.logging.info('Applying DropBlock: dropblock_size {},", "grads)] # # grads = tf.gradients(cost, net)[0] # if not", "left_or_top = (dropblock_size-1) // 2 # right_or_bot = left_or_top if", "tf.float32) gap_C, num = tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C, num] gap_w", "tf.random_uniform(tf.shape(logits), minval=0, maxval=1) # y = logits - tf.log(-tf.log(U +", "seed_drop_rate, eps=1e-20): # if logits == False: # return logits", "'channels_last' else tf.reshape(net, [N, C, height, width]) return net def", "0) valid_block_center = tf.expand_dims(valid_block_center, -1 if data_format == 'channels_last' else", "tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) # cam = tf.reshape(cam, [N, height, width,", "of a feature map are dropped together. 
DropBlock works better", "/ (C * (width - dropblock_size + 1)**2) seed_drop_rate =", "import print_function import re import six # from tensorpack.tfutils.compat import", "[N, 1] # topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) #", "/ tau) # topk, _ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) #", "axis=1) # topk = tf.expand_dims(topk, 1) # [N, C, 1]", "tensor with DropBlock applied. Raises: if width and height of", "= tf.expand_dims(cam_mask, 2) # [N, C, 1] # cam_mask =", "if dropblock_size == width: block_pattern = tf.reduce_min(block_pattern, axis=[2, 3, 4],", "cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)), tf.float32) # cam_mask =", "# cam_mean = tf.maximum(tf.matmul(weights, net, transpose_a=True), 0) # [N, 1,", "is the gamma parameter of DropBlcok. seed_drop_rate = (1.0 -", "tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) net = tf.reshape(net, [N, height,", "grad is not None else tf.zeros_like(var) # for var, grad", "net = net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) net", "cam = 0. # return cam # def _gumbel_softmax(logits, tau,", "net.get_shape().as_list() if width != height: raise ValueError('Input tensor with width!=height", "whether the model is training. 
keep_prob: `float` or `Tensor` keep_prob", "tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5)) # weights = tf.reduce_mean(norm_grads,", "h_i < width - (dropblock_size - 1) // 2)) valid_block_center", "if width != height: raise ValueError('Input tensor with width!=height is", "tf.int32) topk, _ = tf.math.top_k(label, k=k+1) # [N, k] topk", "spt_mask = (cam < topk) spt_mask = tf.reshape(spt_mask, [N, height,", "= tf.reduce_min( block_pattern, axis=[1, 2] if data_format == 'channels_last' else", "coding: utf-8 -*- # File: dropblock.py from __future__ import absolute_import", "# [C, num] label = tf.gather(tf.transpose(gap_w), label) # [N, C]", "tf.logical_and( tf.logical_and(w_i >= int(dropblock_size // 2), w_i < width -", "= (1.0 - keep_prob) * width**2 / dropblock_size**2 / (width", "1 block_pattern = tf.logical_or(block_pattern, cam_mask) block_pattern = tf.cast(block_pattern, dtype=tf.float32) if", "width**2 / dropblock_size**2 / (width - dropblock_size + 1)**2 cam_mask", "with DropBlock applied. 
Raises: if width and height of the", "k = tf.cast(height*width/dropblock_size**2, tf.int32) topk, _ = tf.math.top_k(cam, k=k) #", "keepdims=True)) + tf.constant(1e-5)) # weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N,", "+ 1)**2 cam_mask = _get_cam(net, label, flag, dropblock_size, data_format) #", "+ tf.constant(1e-5)) # weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N, C]", "from __future__ import division from __future__ import print_function import re", "block_pattern = tf.reduce_min( block_pattern, axis=[1, 2] if data_format == 'channels_last'", "[N*C, height*width]) if data_format == 'channels_last' else tf.reshape(cam, [N*C, height*width])", "channel randnoise = tf.random_uniform([N, G, 1, width, height], dtype=tf.float32) block_pattern", "== 'channels_last' else tf.reshape(net, [N, C, height*width]) cam = tf.matmul(weights,", "width!=height is not supported.') dropblock_size = min(dropblock_size, width) # seed_drop_rate", "N = tf.shape(net)[0] if width != height: raise ValueError('Input tensor", "logits == False: # return logits # U = tf.random_uniform(tf.shape(logits),", "logits - tf.log(-tf.log(U + eps) + eps) # cam_mask =", "= tf.expand_dims(valid_block_center, 0) # for depth valid_block_center = tf.expand_dims(valid_block_center, 0)", "None: # # Normalizing the gradients # if data_format ==", "\"\"\"DropBlock: a regularization method for convolutional neural networks. DropBlock is", "if data_format == 'channels_last': # N, height, width, C =", "C = net.get_shape().as_list() else: _, C, width, height = net.get_shape().as_list()", "in model code from tensorpack.tfutils.tower import get_current_tower_context from tensorpack.models import", "version of input tensor with DropBlock applied. 
Raises: if width", "tf.random_uniform([N, G, 1, width, height], dtype=tf.float32) block_pattern = (1 -", "[N, width*height, 1] # cam_mean = tf.maximum(tf.matmul(weights, net, transpose_a=True), 0)", "tf.float32) / tf.cast( tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones,", "due to the fact that activation units in convolutional layers", "== False: # return logits # U = tf.random_uniform(tf.shape(logits), minval=0,", "map are dropped together. DropBlock works better than dropout on", "of DropBlcok. # seed_drop_rate = (1.0 - keep_prob) * width**2", "dropblock_size, flag=None, label=None, G=None, CG=None, data_format='channels_first'): '''CamDrop''' def _get_cam(net, label,", "k = tf.cast(C/8, tf.int32) topk, _ = tf.math.top_k(label, k=k+1) #", "tf.reshape(cam, [N*C, height*width]) if data_format == 'channels_last' else tf.reshape(cam, [N*C,", "# # cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)), tf.float32) #", "if data_format == 'channels_last': N, height, width, C = net.get_shape().as_list()", "0).get_shape().as_list() # [gap_C, num] gap_w = tf.reshape(gap_w, [C, gap_C//C, num])", "False: # return logits # U = tf.random_uniform(tf.shape(logits), minval=0, maxval=1)", "width, 1]) if data_format == 'channels_last' else tf.reshape(cam, [N, 1,", "if CG == None: CG = C // G net", "= net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) net =", "1], padding='SAME', data_format='NHWC' if data_format == 'channels_last' else 'NCHW') percent_ones", "input tensor with DropBlock applied. Raises: if width and height", "# cam = 0. 
# return cam # def _gumbel_softmax(logits,", "randnoise = tf.random_uniform([N, G, 1, width, height], dtype=tf.float32) block_pattern =", "[N, C, 1] chan_mask = tf.expand_dims(chan_mask, 2) # [N, C,", "grads = tf.gradients(tensor, var_list) # return [grad if grad is", "# def _gumbel_softmax(logits, tau, shape, seed_drop_rate, eps=1e-20): # if logits", "tensor are not equal. \"\"\" ctx = get_current_tower_context() is_training =", "= tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if data_format == 'channels_last' else tf.reshape(tf.nn.softmax(cam),", "- dropblock_size + 1)**2) seed_drop_rate = (1.0 - keep_prob) *", "tf.expand_dims(valid_block_center, -1 if data_format == 'channels_last' else 0) randnoise =", "= _get_cam(net, label, flag, dropblock_size, data_format) # Forces the block", "in a contiguous region of a feature map are dropped", "return net def dropblock2(net, keep_prob, dropblock_size, G=None, CG=None, data_format='channels_first'): \"\"\"", "spt_mask = tf.reshape(spt_mask, [N, height, width, 1]) if data_format ==", "net = tf.reshape(net, [N, G, CG, height, width]) dropblock_size =", "= tf.reshape(cam, [N, height, width, 1]) if data_format == 'channels_last'", "/ tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) # cam = tf.reshape(cam, [N, height,", "tf.logical_or(spt_mask, chan_mask) # chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if data_format", "tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims(valid_block_center, -1 if data_format == 'channels_last'", "width]) dropblock_size = min(dropblock_size, width) # seed_drop_rate is the gamma", "Args: net: `Tensor` input tensor. is_training: `bool` for whether the", "data_format='NCHW') # left_or_top = (dropblock_size-1) // 2 # right_or_bot =", "3: group soft-dropout; 4: Uout group dropout def dropblock(net, keep_prob,", "grads = tf.gradients(cost, net)[0] # if not gap_w is None:", "= 0. 
# return cam # def _gumbel_softmax(logits, tau, shape,", "+ randnoise) >= 1 block_pattern = tf.logical_or(block_pattern, cam_mask) block_pattern =", "dropblock_size] block_pattern = -tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1, 1, 1, 1],", "CG=None, data_format='channels_first'): '''CamDrop''' def _get_cam(net, label, flag, dropblock_size, data_format='channels_first'): '''", "is None: return net tf.logging.info('Applying DropBlock: dropblock_size {}, net.shape {}'.format(dropblock_size,", "seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern = tf.logical_or(block_pattern, cam_mask)", "_gumbel_softmax(logits, tau, shape, seed_drop_rate, eps=1e-20): # if logits == False:", "tf.maximum(tf.multiply(net, weights), 0) # [N, C, width*height] # cam =", "height, width]) else: cam_mask = False return cam_mask # def", "equal. \"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training) if not", "net.get_shape().as_list() else: N, C, height, width = net.get_shape().as_list() N =", "N, height, width, C = net.get_shape().as_list() else: N, C, height,", "'''CamDrop''' def _get_cam(net, label, flag, dropblock_size, data_format='channels_first'): ''' net: [N,", "else tf.reshape(cam, [N, 1, height*width]) k = tf.cast(height*width/dropblock_size**2, tf.int32) topk,", "'channels_last' else tf.reshape(net, [N, C, height*width]) # # cam_mean =", "keep_prob: `float` or `Tensor` keep_prob parameter of DropBlock. 
\"None\" means", "cam / tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) # cam = tf.reshape(cam, [N,", "tf.expand_dims(topk, 1) # [N, C, 1] # cam_mask = (cam_mask", "1]) if data_format == 'channels_last' else tf.reshape(tf.nn.sigmoid(cam), [N, 1, height,", "min(dropblock_size, width) # seed_drop_rate is the gamma parameter of DropBlcok.", "dropblock_size: `int` size of blocks to be dropped by DropBlock.", "# N, height, width, C = net.get_shape().as_list() # else: #", "(width - dropblock_size + 1)**2 cam_mask = _get_cam(net, label, flag,", "tf.reduce_mean(gap_w, reduction_indices=[1]) # [C, num] label = tf.gather(tf.transpose(gap_w), label) #", "else tf.reshape(net, [N, C, height, width]) return net def CamDrop(net,", "= (1.0 - keep_prob) * width**2 * G**2 / (C", "tf.reshape(net, [N, C, height, width]) return net def CamDrop(net, keep_prob,", "topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) # topk = tf.expand_dims(topk,", "+ 1)**2) seed_drop_rate = (1.0 - keep_prob) * width**2 /", "net.shape)) if data_format == 'channels_last': _, width, height, _ =", "data_format == 'channels_last': _, width, height, C = net.get_shape().as_list() else:", "CG, height, width]) dropblock_size = min(dropblock_size, width) # seed_drop_rate is", "width]) else: cam_mask = False return cam_mask # def _get_gradcam(net,", "= tf.reshape(gap_w, [C, gap_C//C, num]) gap_w = tf.reduce_mean(gap_w, reduction_indices=[1]) #", "[0, 0], [left_or_top, right_or_bot], [left_or_top, right_or_bot]]) # cam = tf.reshape(cam,", "(1.0 - keep_prob) * width**2 / dropblock_size**2 / (width -", "not supported.') N = tf.shape(net)[0] dropblock_size = min(dropblock_size, width) #", "six # from tensorpack.tfutils.compat import tfv1 as tf # this", "dropblock_size**2 / (width - dropblock_size + 1)**2 # Forces the", "1) # [N, C, 1] # cam_mask = (cam_mask <", "# # cam_mean = 1 + tf.matmul(net, weights, transpose_a=True) #", "reduction_indices=[1,2], keepdims=True) # cam = 
tf.reshape(cam, [N, height, width, C])", "width - dropblock_size + 1)**2 # Forces the block to", "= tf.math.top_k(cam, k=k) # [N, 1, k] topk = tf.gather(topk,", "cam # def _gumbel_softmax(logits, tau, shape, seed_drop_rate, eps=1e-20): # if", "[N, 1, height, width]) # channel k = tf.cast(C/8, tf.int32)", "File: dropblock.py from __future__ import absolute_import from __future__ import division", "- tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1 - seed_drop_rate), dtype=tf.float32) + randnoise)", "for details. Args: net: `Tensor` input tensor. is_training: `bool` for", "= -tf.nn.max_pool( -block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NHWC'", "ctx = get_current_tower_context() is_training = bool(ctx.is_training) if not is_training or", "dropblock_size, 1] else: ksize = [1, 1, dropblock_size, dropblock_size] block_pattern", "width, height = net.get_shape().as_list() if width != height: raise ValueError('Input", "details. Args: net: `Tensor` input tensor. is_training: `bool` for whether", "# weights = tf.expand_dims(weights, 2) # [N, C, 1] #", "\"channels_first\" for `[batch, channels, height, width]` or \"channels_last for `[batch,", "gap_w = tf.reduce_mean(gap_w, reduction_indices=[1]) # [C, num] label = tf.gather(tf.transpose(gap_w),", "and height of the input tensor are not equal. \"\"\"", "# return [grad if grad is not None else tf.zeros_like(var)", "= tf.nn.softmax(y / tau) # topk, _ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1],", "if data_format == 'channels_last' else tf.reshape(cam, [N, 1, height, width])", "= tf.matmul(weights, net, transpose_a=True) # [N, 1, width*height] # spt_mask", "ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NCHW') block_pattern = tf.expand_dims(block_pattern,", "in convolutional layers are spatially correlated. 
See https://arxiv.org/pdf/1810.12890.pdf for details.", "if width and height of the input tensor are not", "keepdims=True)) # cam = tf.reshape(cam, [N, height, width, 1]) if", "= tf.expand_dims( valid_block_center, -1 if data_format == 'channels_last' else 0)", "cam = tf.reshape(cam, [N, height*width, 1]) if data_format == 'channels_last'", "_ = tf.math.top_k(label, k=k+1) # [N, k] topk = tf.gather(topk,", "axis=1) # [N, 1] topk = tf.expand_dims(topk, 1) # [N,", "return [grad if grad is not None else tf.zeros_like(var) #", "_, width, height, C = net.get_shape().as_list() else: _, C, width,", "from tensorpack.tfutils.compat import tfv1 as tf # this should be", "1: paper baseline; 2: group dropout; 3: group soft-dropout; 4:", "# topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) # topk =", "2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net =", "spatial weights = tf.expand_dims(label, 2) # [N, C, 1] net", "cam_mask = False return cam_mask # def _get_gradcam(net, cost=None, gap_w=None,", "C] # weights = tf.expand_dims(weights, 2) # [N, C, 1]", "import tfv1 as tf # this should be avoided first", "not gap_w is None: # # Normalizing the gradients #", "topk = tf.gather(topk, indices=k, axis=1) # [N, 1] topk =", "[N, height*width, 1]) if data_format == 'channels_last' else tf.reshape(cam, [N,", "int(dropblock_size // 2), h_i < width - (dropblock_size - 1)", "width!=height is not supported.') N = tf.shape(net)[0] dropblock_size = min(dropblock_size,", "height*width, 1]) if data_format == 'channels_last' else tf.reshape(cam, [N, 1,", "= tf.expand_dims(weights, 2) # [N, C, 1] # net =", "print_function import re import six # from tensorpack.tfutils.compat import tfv1", "return logits # U = tf.random_uniform(tf.shape(logits), minval=0, maxval=1) # y", "DropBlock applied. 
Raises: if width and height of the input", "# cam_chan = tf.maximum(tf.multiply(net, weights), 0) # [N, C, width*height]", "/ dropblock_size**2 / (width - dropblock_size + 1)**2 cam_mask =", "is None: gap_w = tf.convert_to_tensor(gap_w, tf.float32) gap_C, num = tf.squeeze(gap_w,", "- seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern = tf.cast(block_pattern,", "dropblock_size, data_format='channels_first'): ''' net: [N, C, H, W] gap_w :", "1] chan_mask = tf.expand_dims(chan_mask, 2) # [N, C, 1, 1]", "to be inside the feature map. w_i, h_i = tf.meshgrid(tf.range(width),", "convolutional layers due to the fact that activation units in", "= tf.shape(net)[0] if width != height: raise ValueError('Input tensor with", "tf.gather(topk, indices=k, axis=1) # [N, 1] topk = tf.expand_dims(topk, 1)", "[N, C, 1, 1] cam_mask = tf.logical_or(spt_mask, chan_mask) # chan_mask", "def _get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'): # # Conv layer tensor", "keep_prob) * width**2 / dropblock_size**2 / ( width - dropblock_size", "C, width, height = net.get_shape().as_list() if width != height: raise", "of DropBlock. \"None\" means no DropBlock. dropblock_size: `int` size of", "(width - dropblock_size + 1)**2 # Forces the block to", "[[0, 0], [0, 0], [left_or_top, right_or_bot], [left_or_top, right_or_bot]]) # cam", "utf-8 -*- # File: dropblock.py from __future__ import absolute_import from", "tf.cast( block_pattern, net.dtype) return net def dropblock2(net, keep_prob, dropblock_size, G=None,", "the feature map. 
w_i, h_i = tf.meshgrid(tf.range(width), tf.range(width)) valid_block_center =", "net.get_shape().as_list() N = tf.shape(net)[0] if width != height: raise ValueError('Input", ">= 1 block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width:", "width*height, 1] # cam_mean = tf.maximum(tf.matmul(weights, net, transpose_a=True), 0) #", "1, 1], padding='VALID', data_format='NCHW') # left_or_top = (dropblock_size-1) // 2", "= tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5)) # weights =", "input tensor are not equal. \"\"\" ctx = get_current_tower_context() is_training", "height], dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast(", "% 2 == 1 else dropblock_size-left_or_top-1 # cam = tf.pad(cam,", "transpose_a=True) # [N, width*height, 1] # cam_mean = tf.maximum(tf.matmul(weights, net,", "data_format == 'channels_last' else tf.reshape(cam, [N, 1, height, width]) #", "tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C, num] gap_w = tf.reshape(gap_w, [C, gap_C//C,", "'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net =", "1, k] topk = tf.gather(topk, indices=[k-1], axis=-1) # [N, 1,", "keep_prob parameter of DropBlock. \"None\" means no DropBlock. 
dropblock_size: `int`", "soft-dropout; 4: Uout group dropout def dropblock(net, keep_prob, dropblock_size, gap_w=None,", "[gap_C, num_of_class] ''' if data_format == 'channels_last': N, height, width,", "is not supported.') dropblock_size = min(dropblock_size, width) # seed_drop_rate is", "[N, 1, width*height] # cam_chan = tf.maximum(tf.multiply(net, weights), 0) #", "cam_mask = _get_cam(net, label, flag, dropblock_size, data_format) # Forces the", "label=None, G=None, CG=None, data_format='channels_first'): '''CamDrop''' def _get_cam(net, label, flag, dropblock_size,", "# [N, 1, width*height] # spt_mask = tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2],", "= tf.cast(height*width/dropblock_size**2, tf.int32) topk, _ = tf.math.top_k(cam, k=k) # [N,", "size of blocks to be dropped by DropBlock. data_format: `str`", "if data_format == 'channels_last': _, width, height, _ = net.get_shape().as_list()", "import six # from tensorpack.tfutils.compat import tfv1 as tf #", "dropblock_size, dropblock_size, 1] else: ksize = [1, 1, dropblock_size, dropblock_size]", "should be avoided first in model code from tensorpack.tfutils.tower import", "tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1 - seed_drop_rate), dtype=tf.float32) + randnoise) >=", "[N, 1, k] topk = tf.gather(topk, indices=[k-1], axis=-1) # [N,", "structured dropout, where units in a contiguous region of a", "0. 
# return cam # def _gumbel_softmax(logits, tau, shape, seed_drop_rate,", "cam_mean = tf.maximum(tf.matmul(weights, net, transpose_a=True), 0) # [N, 1, width*height]", "dropblock_size], strides=[1, 1, 1, 1], padding='VALID', data_format='NCHW') # left_or_top =", "cam_mask # def _get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'): # # Conv", "if dropblock_size == width: block_pattern = tf.reduce_min( block_pattern, axis=[1, 2]", "tf.gather(tf.transpose(gap_w), label) # [N, C] # spatial weights = tf.expand_dims(label,", "= (1.0 - keep_prob) * width**2 / dropblock_size**2 / (", "tf.expand_dims(valid_block_center, 0) # for depth valid_block_center = tf.expand_dims(valid_block_center, 0) #", "# [N, 1, 1] spt_mask = (cam < topk) spt_mask", "[N, C] # weights = tf.expand_dims(weights, 2) # [N, C,", "from __future__ import print_function import re import six # from", "keepdims=True)), tf.float32) # cam_mask = tf.expand_dims(cam_mask, 2) # [N, C,", "1]) if data_format == 'channels_last' else tf.reshape(cam, [N, 1, height*width])", "transpose_a=True), 0) # [N, 1, width*height] # cam_chan = tf.maximum(tf.multiply(net,", "keep_prob) * width**2 / dropblock_size**2 / (width - dropblock_size +", "where units in a contiguous region of a feature map", "1]) if data_format == 'channels_last' else tf.reshape(cam, [N, 1, height,", "# [N, C, 1] chan_mask = (label < topk) chan_mask", "strides=[1, 1, 1, 1], padding='VALID', data_format='NCHW') # left_or_top = (dropblock_size-1)", "convolutional neural networks. 
DropBlock is a form of structured dropout,", "better than dropout on convolutional layers due to the fact", "net, transpose_a=True), 0) # [N, 1, width*height] # cam_chan =", "= tf.shape(net)[0] dropblock_size = min(dropblock_size, width) # seed_drop_rate is the", "tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if data_format == 'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C,", "data_format == 'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) # chan_mask =", "right_or_bot]]) # cam = tf.reshape(cam, [N, height*width, 1]) if data_format", "# weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N, C] # weights", "/ dropblock_size**2 / ( width - dropblock_size + 1)**2 #", "of structured dropout, where units in a contiguous region of", "tf.expand_dims(chan_mask, 2) # [N, C, 1, 1] cam_mask = tf.logical_or(spt_mask,", "height*width]) if data_format == 'channels_last' else tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) #", "(cam < topk) spt_mask = tf.reshape(spt_mask, [N, height, width, 1])", "-*- # File: dropblock.py from __future__ import absolute_import from __future__", "return net def CamDrop(net, keep_prob, dropblock_size, flag=None, label=None, G=None, CG=None,", "Normalizing the gradients # if data_format == 'channels_last': # N,", "if data_format == 'channels_last' else tf.reshape(cam, [N*C, height*width]) # chan_mask", "# for batch valid_block_center = tf.expand_dims(valid_block_center, 0) # for channel", "transpose_a=True) # [N, 1, width*height] # spt_mask = tf.not_equal(cam, tf.reduce_max(cam,", "tf.float32) # cam_mask = tf.expand_dims(cam_mask, 2) # [N, C, 1]", "dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape)) if data_format == 'channels_last': _,", "height, width, C]) if data_format == 'channels_last' else tf.reshape(cam, [N,", "gap_C//C, num]) gap_w = tf.reduce_mean(gap_w, reduction_indices=[1]) # [C, num] label", "[N, C, 1] # cam_mask = tf.expand_dims(cam_mask, 2) # [N,", 
"tf.nn.softmax(y / tau) # topk, _ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32))", "1] topk = tf.expand_dims(topk, 1) # [N, C, 1] chan_mask", "tf.cast(tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern,", "G = C // CG if CG == None: CG", "data_format='channels_first'): \"\"\"DropBlock: a regularization method for convolutional neural networks. DropBlock", "= -tf.nn.max_pool(block_pattern, ksize=ksize, strides=[1, 1, 1, 1], padding='SAME', data_format='NCHW') block_pattern", "# cam_mask = tf.nn.softmax(y / tau) # topk, _ =", "CamDrop(net, keep_prob, dropblock_size, flag=None, label=None, G=None, CG=None, data_format='channels_first'): '''CamDrop''' def", "# chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N, height, width, 1]) if data_format", "channel k = tf.cast(C/8, tf.int32) topk, _ = tf.math.top_k(label, k=k+1)", "tf.log(-tf.log(U + eps) + eps) # cam_mask = tf.nn.softmax(y /", "are not equal. \"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training)", "tf.expand_dims( valid_block_center, -1 if data_format == 'channels_last' else 0) randnoise", "if not gap_w is None: gap_w = tf.convert_to_tensor(gap_w, tf.float32) gap_C,", "height*width]) # chan_mask = tf.reshape(tf.nn.sigmoid(cam), [N, height, width, 1]) if", "-*- coding: utf-8 -*- # File: dropblock.py from __future__ import", "tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) # cam = tf.reshape(cam, [N, height, width,", "'dropblock2','dropblock3','dropblock4'] # 1: paper baseline; 2: group dropout; 3: group", "[N, C, 1] # cam_mask = (cam_mask < topk) #", "topk, _ = tf.math.top_k(cam_mask, k=tf.cast(seed_drop_rate*shape[-1], tf.int32)) # [N, 1] #", "left_or_top if dropblock_size % 2 == 1 else dropblock_size-left_or_top-1 #", "C, 1] # cam_mask = (cam_mask < topk) # #", "eps) # cam_mask = tf.nn.softmax(y / tau) # topk, _", "[N, C, 1] # net = tf.reshape(net, [N, height*width, C])", "= get_current_tower_context() 
is_training = bool(ctx.is_training) if not is_training or keep_prob", "'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast( tf.size(block_pattern),", "height, width]) # else: # cam = 0. # return", "feature map are dropped together. DropBlock works better than dropout", "CG = C // G net = tf.reshape(net, [N, G,", "`Tensor` input tensor. is_training: `bool` for whether the model is", "dropblock_size**2) / (C * (width - dropblock_size + 1)**2) seed_drop_rate", "C, height, width = net.get_shape().as_list() N = tf.shape(net)[0] if width", "-1 if data_format == 'channels_last' else 0) randnoise = tf.random_uniform(tf.shape(net),", "# cam = cam / tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True) # cam", "= min(dropblock_size, width) # seed_drop_rate is the gamma parameter of", "ksize = [1, 1, dropblock_size, dropblock_size] block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2])", "= tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims( valid_block_center, -1 if data_format", "tensor with width!=height is not supported.') dropblock_size = min(dropblock_size, width)", "to be dropped by DropBlock. data_format: `str` either \"channels_first\" for", "width]) # cam = tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size, dropblock_size], strides=[1,", "tf.reduce_min(block_pattern, axis=[2, 3, 4], keepdims=True) else: ksize = [1, 1,", "[N*C, height*width]) # chan_mask = tf.reshape(cam, [N*C, height*width]) if data_format", "# [N, 1, width*height] # cam_chan = tf.maximum(tf.multiply(net, weights), 0)", "[N, C, height*width]) cam = tf.matmul(weights, net, transpose_a=True) # [N,", "1] # cam_mask = (cam_mask < topk) # # cam_mask", "+ eps) # cam_mask = tf.nn.softmax(y / tau) # topk,", "(dropblock_size - 1) // 2), tf.logical_and(h_i >= int(dropblock_size // 2),", "model is training. 
keep_prob: `float` or `Tensor` keep_prob parameter of", "tf.maximum(tf.matmul(weights, net, transpose_a=True), 0) # [N, 1, width*height] # cam_chan", "[N, C] # spatial weights = tf.expand_dims(label, 2) # [N,", "that activation units in convolutional layers are spatially correlated. See", "tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones,", "def _compute_gradients(tensor, var_list): # grads = tf.gradients(tensor, var_list) # return", "tf.math.top_k(label, k=k+1) # [N, k] topk = tf.gather(topk, indices=k, axis=1)", "net.dtype) * tf.cast( block_pattern, net.dtype) return net def dropblock2(net, keep_prob,", "* dropblock_size**2) / (C * (width - dropblock_size + 1)**2)", "<reponame>whj363636/CamDrop # -*- coding: utf-8 -*- # File: dropblock.py from", "== 'channels_last': ksize = [1, dropblock_size, dropblock_size, 1] else: ksize", "else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast( tf.size(block_pattern), tf.float32)", "tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32) + tf.cast((1", "'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) / tf.cast(tf.size(block_pattern), tf.float32)", "chan_mask = tf.expand_dims(chan_mask, 2) # [N, C, 1, 1] cam_mask", "= tf.expand_dims(valid_block_center, 0) valid_block_center = tf.expand_dims(valid_block_center, -1 if data_format ==", "flag=None, label=None, G=None, CG=None, data_format='channels_first'): '''CamDrop''' def _get_cam(net, label, flag,", "depth valid_block_center = tf.expand_dims(valid_block_center, 0) # for batch valid_block_center =", "// 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) # for depth valid_block_center", "if G == None: G = C // CG if", "= tf.gather(topk, indices=k, axis=1) # [N, 1] topk = tf.expand_dims(topk,", "return net tf.logging.info('Applying DropBlock: 
dropblock_size {}, net.shape {}'.format(dropblock_size, net.shape)) if", "[N, height*width, C]) if data_format == 'channels_last' else tf.reshape(net, [N,", "randnoise) >= 1 block_pattern = tf.logical_or(block_pattern, cam_mask) block_pattern = tf.cast(block_pattern,", "supported.') if G == None: G = C // CG", "tf.int32)-1, axis=1) # topk = tf.expand_dims(topk, 1) # [N, C,", "through ReLU # cam = cam / tf.reduce_max(cam, reduction_indices=[1,2], keepdims=True)", "# return logits # U = tf.random_uniform(tf.shape(logits), minval=0, maxval=1) #", "if data_format == 'channels_last' else tf.reshape(net, [N, C, height*width]) cam", "dropblock_size == width: block_pattern = tf.reduce_min( block_pattern, axis=[1, 2] if", "dropblock_size, data_format) # Forces the block to be inside the", "CG=None, data_format='channels_first'): \"\"\" mimic GN \"\"\" ctx = get_current_tower_context() is_training", "= tf.cast(block_pattern, dtype=tf.float32) if dropblock_size == width: block_pattern = tf.reduce_min(", "C, width*height] # cam = cam_mean*cam_chan # # Passing through", "tensorflow as tf __all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4'] # 1: paper", "C]) if data_format == 'channels_last' else tf.reshape(net, [N, C, height,", "tf.cast( (1 - seed_drop_rate), dtype=tf.float32) + randnoise) >= 1 block_pattern", "randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32) block_pattern = (1 - tf.cast(valid_block_center, dtype=tf.float32)", "# grads = tf.gradients(cost, net)[0] # if not gap_w is", "__future__ import absolute_import from __future__ import division from __future__ import", "tf.cast(valid_block_center, dtype=tf.float32) + tf.cast( (1 - seed_drop_rate), dtype=tf.float32) + randnoise)", "tensor. is_training: `bool` for whether the model is training. 
keep_prob:", "Passing through ReLU # cam = cam / tf.reduce_max(cam, reduction_indices=[1,2],", "else tf.zeros_like(var) # for var, grad in zip(var_list, grads)] #", "1] else: ksize = [1, 1, dropblock_size, dropblock_size] block_pattern =", "tf.int32)) # [N, 1] # topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1,", "height: raise ValueError('Input tensor with width!=height is not supported.') N", "data_format='NHWC' if data_format == 'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)),", "N = tf.shape(net)[0] # grads = _compute_gradients(cost, [net])[0] # norm_grads", "data_format='NCHW') block_pattern = tf.expand_dims(block_pattern, 2) percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) /", "4], keepdims=True) else: ksize = [1, 1, dropblock_size, dropblock_size] block_pattern", "training. keep_prob: `float` or `Tensor` keep_prob parameter of DropBlock. \"None\"", "of DropBlcok. seed_drop_rate = (1.0 - keep_prob) * width**2 /", "cam_mean*cam_chan # # Passing through ReLU # cam = cam", "1] # return cam_mask ctx = get_current_tower_context() is_training = bool(ctx.is_training)", "as tf # this should be avoided first in model", "# # Passing through ReLU # cam = cam /", "= tf.expand_dims(label, 2) # [N, C, 1] net = tf.reshape(net,", "if data_format == 'channels_last' else tf.reshape(cam, [N, C, height, width])", "+ randnoise) >= 1 block_pattern = tf.cast(block_pattern, dtype=tf.float32) if dropblock_size", "for channel randnoise = tf.random_uniform([N, G, 1, width, height], dtype=tf.float32)", "height, width, C = net.get_shape().as_list() else: N, C, height, width", "(dropblock_size - 1) // 2)) valid_block_center = tf.expand_dims(valid_block_center, 0) #", "the gamma parameter of DropBlcok. 
# seed_drop_rate = (1.0 -", "indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1) # topk = tf.expand_dims(topk, 1) # [N,", "# for depth valid_block_center = tf.expand_dims(valid_block_center, 0) # for batch", "width: block_pattern = tf.reduce_min(block_pattern, axis=[2, 3, 4], keepdims=True) else: ksize", "0], [left_or_top, right_or_bot], [left_or_top, right_or_bot]]) # cam = tf.reshape(cam, [N,", "are spatially correlated. See https://arxiv.org/pdf/1810.12890.pdf for details. Args: net: `Tensor`", "tf.convert_to_tensor(gap_w, tf.float32) gap_C, num = tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C, num]", "[N, 1, height, width]) else: cam_mask = False return cam_mask", "else: _, _, width, height = net.get_shape().as_list() if width !=", "if data_format == 'channels_last' else tf.reshape(spt_mask, [N, 1, height, width])", "G**2 / (C * dropblock_size**2) / (C * (width -", "1, 1] cam_mask = tf.logical_or(spt_mask, chan_mask) # chan_mask = tf.reshape(tf.nn.softmax(cam),", "== 'channels_last' else tf.reshape(cam, [N, C, height, width]) # else:", "weights = tf.reduce_mean(norm_grads, reduction_indices=[2,3]) # [N, C] # weights =", "= tf.maximum(tf.matmul(weights, net, transpose_a=True), 0) # [N, 1, width*height] #", "gap_w = tf.reshape(gap_w, [C, gap_C//C, num]) gap_w = tf.reduce_mean(gap_w, reduction_indices=[1])", "tf.cast(percent_ones, net.dtype) * tf.cast( block_pattern, net.dtype) return net def dropblock2(net,", "fact that activation units in convolutional layers are spatially correlated.", "width]) # channel k = tf.cast(C/8, tf.int32) topk, _ =", "# norm_grads = tf.divide(grads, tf.sqrt(tf.reduce_mean(tf.square(grads), reduction_indices=[2,3], keepdims=True)) + tf.constant(1e-5)) #", "0) # for channel randnoise = tf.random_uniform([N, G, 1, width,", "data_format == 'channels_last' else 'NCHW') percent_ones = tf.cast(tf.reduce_sum((block_pattern)), tf.float32) /", "= tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], 
keepdims=True)), tf.float32) # cam_mask = tf.expand_dims(cam_mask,", "net = net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype) return", "height, width]) dropblock_size = min(dropblock_size, width) # seed_drop_rate is the", "= net.get_shape().as_list() else: _, C, width, height = net.get_shape().as_list() if", "= C // G net = tf.reshape(net, [N, G, CG,", "tf.nn.avg_pool(cam, ksize=[1, 1, dropblock_size, dropblock_size], strides=[1, 1, 1, 1], padding='VALID',", "4: Uout group dropout def dropblock(net, keep_prob, dropblock_size, gap_w=None, label=None,", "tf.reshape(tf.nn.sigmoid(cam), [N, height, width, 1]) if data_format == 'channels_last' else", "activation units in convolutional layers are spatially correlated. See https://arxiv.org/pdf/1810.12890.pdf", "// 2), tf.logical_and(h_i >= int(dropblock_size // 2), h_i < width", "# grads = tf.gradients(tensor, var_list) # return [grad if grad", "else dropblock_size-left_or_top-1 # cam = tf.pad(cam, [[0, 0], [0, 0],", "dropped together. DropBlock works better than dropout on convolutional layers", "if data_format == 'channels_last' else [2, 3], keepdims=True) else: if", "if data_format == 'channels_last' else 0) randnoise = tf.random_uniform(tf.shape(net), dtype=tf.float32)", "height, width, channels]`. Returns: A version of input tensor with", "net.get_shape().as_list() # else: # N, C, height, width = net.get_shape().as_list()", "topk) chan_mask = tf.expand_dims(chan_mask, 2) # [N, C, 1] chan_mask", "Returns: A version of input tensor with DropBlock applied. 
Raises:", "C, 1] chan_mask = tf.expand_dims(chan_mask, 2) # [N, C, 1,", "dropblock_size**2 / (width - dropblock_size + 1)**2 cam_mask = _get_cam(net,", "+ tf.matmul(net, weights, transpose_a=True) # [N, width*height, 1] # cam_mean", "tf.float32) / tf.cast(tf.size(block_pattern), tf.float32) net = net / tf.cast(percent_ones, net.dtype)", "height, _ = net.get_shape().as_list() else: _, _, width, height =", "# [N, 1] # topk = tf.gather(topk, indices=tf.cast(seed_drop_rate*shape[-1], tf.int32)-1, axis=1)", "net)[0] # if not gap_w is None: # # Normalizing", "_get_gradcam(net, cost=None, gap_w=None, data_format='channels_first'): # # Conv layer tensor [?,2048,10,10]", "tf.reshape(cam, [N, height*width, 1]) if data_format == 'channels_last' else tf.reshape(cam,", "valid_block_center = tf.expand_dims(valid_block_center, 0) # for channel randnoise = tf.random_uniform([N,", "net.get_shape().as_list() else: _, C, width, height = net.get_shape().as_list() if width", "C, H, W] gap_w : [gap_C, num_of_class] ''' if data_format", "= tf.logical_or(spt_mask, chan_mask) # chan_mask = tf.reshape(tf.nn.softmax(cam), [N*C, height*width]) if", "width, height, C = net.get_shape().as_list() else: _, C, width, height", "# cam_mask = tf.expand_dims(cam_mask, 2) # [N, C, 1] #", "!= height: raise ValueError('Input tensor with width!=height is not supported.')", "gamma parameter of DropBlcok. seed_drop_rate = (1.0 - keep_prob) *", "weights, transpose_a=True) # [N, width*height, 1] # cam_mean = tf.maximum(tf.matmul(weights,", "cost=None, gap_w=None, data_format='channels_first'): # # Conv layer tensor [?,2048,10,10] #", "\"None\" means no DropBlock. 
dropblock_size: `int` size of blocks to", "net.dtype) * tf.cast(block_pattern, net.dtype) net = tf.reshape(net, [N, height, width,", "flag > 0 else None if not gap_w is None:", "< topk) # # cam_mask = tf.cast(tf.equal(cam_mask, tf.reduce_max(cam_mask, reduction_indices=[1], keepdims=True)),", "# cam = tf.reshape(cam, [N, height, width, 1]) if data_format", "tf.float32) net = net / tf.cast(percent_ones, net.dtype) * tf.cast(block_pattern, net.dtype)", "net = tf.reshape(net, [N, height, width, C]) if data_format ==", "dropblock(net, keep_prob, dropblock_size, gap_w=None, label=None, G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock: a", "on convolutional layers due to the fact that activation units", "'channels_last': N, height, width, C = net.get_shape().as_list() else: N, C,", "(1.0 - keep_prob) * width**2 / dropblock_size**2 / ( width", "group dropout def dropblock(net, keep_prob, dropblock_size, gap_w=None, label=None, G=None, CG=None,", "FullyConnected import tensorflow as tf __all__ = ['dropblock', 'dropblock2','dropblock3','dropblock4'] #", "spt_mask = tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) # cam = tf.reshape(cam,", "`[batch, channels, height, width]` or \"channels_last for `[batch, height, width,", "height, width, 1]) if data_format == 'channels_last' else tf.reshape(cam, [N,", "`str` either \"channels_first\" for `[batch, channels, height, width]` or \"channels_last", "tf.gradients(cost, net)[0] # if not gap_w is None: # #", "= tf.convert_to_tensor(gap_w, tf.float32) gap_C, num = tf.squeeze(gap_w, 0).get_shape().as_list() # [gap_C,", "seed_drop_rate = (1.0 - keep_prob) * width**2 * G**2 /", "# Passing through ReLU # cam = cam / tf.reduce_max(cam,", "= tf.reshape(net, [N, G, CG, height, width]) dropblock_size = min(dropblock_size,", "1, height, width]) # channel k = tf.cast(C/8, tf.int32) topk,", "tf.not_equal(cam, tf.reduce_max(cam, reduction_indices=[2], keepdims=True)) # cam = tf.reshape(cam, [N, height,", 
"= tf.expand_dims(chan_mask, 2) # [N, C, 1] chan_mask = tf.expand_dims(chan_mask,", "C, 1] # net = tf.reshape(net, [N, height*width, C]) if", "dropblock_size + 1)**2) seed_drop_rate = (1.0 - keep_prob) * width**2", "minval=0, maxval=1) # y = logits - tf.log(-tf.log(U + eps)", "gap_w=None, label=None, G=None, CG=None, data_format='channels_first'): \"\"\"DropBlock: a regularization method for", "[N, G, CG, height, width]) dropblock_size = min(dropblock_size, width) #", "# seed_drop_rate = (1.0 - keep_prob) * width**2 * G**2", "tf.math.top_k(cam, k=k) # [N, 1, k] topk = tf.gather(topk, indices=[k-1],", "\"\"\" mimic GN \"\"\" ctx = get_current_tower_context() is_training = bool(ctx.is_training)", "1, dropblock_size, dropblock_size] block_pattern = tf.reduce_max(-block_pattern, reduction_indices=[2]) block_pattern = -tf.nn.max_pool(block_pattern," ]
[ "tree. # from rlstructures import logging from rlstructures.env_wrappers import GymEnv,", "import GymEnv, GymEnvInf from rlstructures.tools import weight_init import torch.nn as", "envs.append(e) return GymEnvInf(envs, seed) def create_agent(model, n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions)", "= create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs, seed)", "and its affiliates. # # This source code is licensed", "import numpy as np import torch.nn.functional as F from tutorial.tutorial_recurrent_policy.agent", "in the # LICENSE file in the root directory of", "e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs, seed) def create_train_env(n_envs,", "under the MIT license found in the # LICENSE file", "nn import copy import torch import time import numpy as", "== \"__main__\": # We use spawn mode such that most", "We use spawn mode such that most of the environment", "if __name__ == \"__main__\": # We use spawn mode such", "torch import time import numpy as np import torch.nn.functional as", "gym from gym.wrappers import TimeLimit # We write the 'create_env'", "config, create_env, create_train_env, create_agent): super().__init__(config, create_env, create_train_env, create_agent) if __name__", "rlstructures.tools import weight_init import torch.nn as nn import copy import", "= TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs, seed) def create_train_env(n_envs, env_name=None,", "def create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = [] for k", "A2C import gym from gym.wrappers import TimeLimit # We write", "Copyright (c) Facebook, Inc. and its affiliates. 
# # This", "= create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs, seed)", "LICENSE file in the root directory of this source tree.", "# # Copyright (c) Facebook, Inc. and its affiliates. #", "1.0, \"entropy_coef\": 0.01, \"a2c_coef\": 1.0, \"logdir\": \"./results\", } exp =", "its affiliates. # # This source code is licensed under", "directory of this source tree. # from rlstructures import logging", "create_agent) if __name__ == \"__main__\": # We use spawn mode", "function in the main file to allow these functions to", "copy import torch import time import numpy as np import", "in range(n_envs): e = create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e)", "max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs, seed) def create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None):", "use spawn mode such that most of the environment will", "__init__(self, config, create_env, create_train_env, create_agent): super().__init__(config, create_env, create_train_env, create_agent) if", "TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs, seed) def create_agent(model, n_actions=1): return", "environment will run in multiple processes import torch.multiprocessing as mp", "mp.set_start_method(\"spawn\") config = { \"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\": 4,", "e = create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs,", "seed=None): envs = [] for k in range(n_envs): e =", "RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import A2C import gym from gym.wrappers import", "'create_env' and 'create_agent' function in the main file to allow", "mp mp.set_start_method(\"spawn\") config = { \"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\":", "\"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\": 
4, \"max_episode_steps\": 100, \"env_seed\": 42,", "torch.nn.functional as F from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import", "max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs, seed) def create_agent(model, n_actions=1): return RecurrentAgent(model=model,", "100, \"env_seed\": 42, \"n_threads\": 4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256, \"time_limit\":", "import gym from gym.wrappers import TimeLimit # We write the", "\"lr\": 0.001, \"discount_factor\": 0.95, \"critic_coef\": 1.0, \"entropy_coef\": 0.01, \"a2c_coef\": 1.0,", "for k in range(n_envs): e = create_gym_env(env_name) e = TimeLimit(e,", "such that most of the environment will run in multiple", "in the main file to allow these functions to be", "that most of the environment will run in multiple processes", "F from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import A2C import", "# LICENSE file in the root directory of this source", "the MIT license found in the # LICENSE file in", "numpy as np import torch.nn.functional as F from tutorial.tutorial_recurrent_policy.agent import", "gym.make(env_name) def create_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = [] for", "e = create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs,", "run in multiple processes import torch.multiprocessing as mp mp.set_start_method(\"spawn\") config", "the root directory of this source tree. 
# from rlstructures", "1.0, \"logdir\": \"./results\", } exp = Experiment(config, create_env, create_train_env, create_agent)", "found in the # LICENSE file in the root directory", "Experiment(A2C): def __init__(self, config, create_env, create_train_env, create_agent): super().__init__(config, create_env, create_train_env,", "these functions to be used with pickle when creating the", "the environment will run in multiple processes import torch.multiprocessing as", "tutorial.tutorial_recurrent_policy.a2c import A2C import gym from gym.wrappers import TimeLimit #", "GymEnvInf from rlstructures.tools import weight_init import torch.nn as nn import", "creating the batcher processes def create_gym_env(env_name): return gym.make(env_name) def create_env(n_envs,", "\"n_threads\": 4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256, \"time_limit\": 3600, \"lr\": 0.001,", "# from rlstructures import logging from rlstructures.env_wrappers import GymEnv, GymEnvInf", "license found in the # LICENSE file in the root", "import torch.nn.functional as F from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c", "create_env, create_train_env, create_agent): super().__init__(config, create_env, create_train_env, create_agent) if __name__ ==", "the 'create_env' and 'create_agent' function in the main file to", "\"a2c_timesteps\": 3, \"n_envs\": 4, \"max_episode_steps\": 100, \"env_seed\": 42, \"n_threads\": 4,", "rlstructures import logging from rlstructures.env_wrappers import GymEnv, GymEnvInf from rlstructures.tools", "def create_agent(model, n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C): def __init__(self,", "n_actions=n_actions) class Experiment(A2C): def __init__(self, config, create_env, create_train_env, create_agent): super().__init__(config,", "to allow these functions to be used with pickle when", "import torch.nn as nn import copy import torch import time", "We 
write the 'create_env' and 'create_agent' function in the main", "and 'create_agent' function in the main file to allow these", "from rlstructures import logging from rlstructures.env_wrappers import GymEnv, GymEnvInf from", "rlstructures.env_wrappers import GymEnv, GymEnvInf from rlstructures.tools import weight_init import torch.nn", "as nn import copy import torch import time import numpy", "functions to be used with pickle when creating the batcher", "create_train_env, create_agent) if __name__ == \"__main__\": # We use spawn", "in multiple processes import torch.multiprocessing as mp mp.set_start_method(\"spawn\") config =", "\"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\": 4, \"max_episode_steps\": 100, \"env_seed\": 42, \"n_threads\":", "\"n_envs\": 4, \"max_episode_steps\": 100, \"env_seed\": 42, \"n_threads\": 4, \"n_evaluation_threads\": 2,", "envs.append(e) return GymEnv(envs, seed) def create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs", "create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = [] for k in", "4, \"max_episode_steps\": 100, \"env_seed\": 42, \"n_threads\": 4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\":", "# We use spawn mode such that most of the", "torch.nn as nn import copy import torch import time import", "logging from rlstructures.env_wrappers import GymEnv, GymEnvInf from rlstructures.tools import weight_init", "super().__init__(config, create_env, create_train_env, create_agent) if __name__ == \"__main__\": # We", "\"env_seed\": 42, \"n_threads\": 4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256, \"time_limit\": 3600,", "from gym.wrappers import TimeLimit # We write the 'create_env' and", "when creating the batcher processes def create_gym_env(env_name): return gym.make(env_name) def", "TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs, seed) def create_train_env(n_envs, env_name=None, max_episode_steps=None,", 
"file in the root directory of this source tree. #", "be used with pickle when creating the batcher processes def", "the batcher processes def create_gym_env(env_name): return gym.make(env_name) def create_env(n_envs, env_name=None,", "of this source tree. # from rlstructures import logging from", "as np import torch.nn.functional as F from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent", "(c) Facebook, Inc. and its affiliates. # # This source", "class Experiment(A2C): def __init__(self, config, create_env, create_train_env, create_agent): super().__init__(config, create_env,", "multiple processes import torch.multiprocessing as mp mp.set_start_method(\"spawn\") config = {", "will run in multiple processes import torch.multiprocessing as mp mp.set_start_method(\"spawn\")", "is licensed under the MIT license found in the #", "seed) def create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = [] for", "range(n_envs): e = create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return", "time import numpy as np import torch.nn.functional as F from", "def create_gym_env(env_name): return gym.make(env_name) def create_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs", "the # LICENSE file in the root directory of this", "from rlstructures.env_wrappers import GymEnv, GymEnvInf from rlstructures.tools import weight_init import", "main file to allow these functions to be used with", "to be used with pickle when creating the batcher processes", "create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs, seed) def", "allow these functions to be used with pickle when creating", "import TimeLimit # We write the 'create_env' and 'create_agent' function", "create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnv(envs, seed) def", "e = TimeLimit(e, 
max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs, seed) def create_agent(model,", "0.001, \"discount_factor\": 0.95, \"critic_coef\": 1.0, \"entropy_coef\": 0.01, \"a2c_coef\": 1.0, \"logdir\":", "0.95, \"critic_coef\": 1.0, \"entropy_coef\": 0.01, \"a2c_coef\": 1.0, \"logdir\": \"./results\", }", "as F from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import A2C", "\"discount_factor\": 0.95, \"critic_coef\": 1.0, \"entropy_coef\": 0.01, \"a2c_coef\": 1.0, \"logdir\": \"./results\",", "\"entropy_coef\": 0.01, \"a2c_coef\": 1.0, \"logdir\": \"./results\", } exp = Experiment(config,", "envs = [] for k in range(n_envs): e = create_gym_env(env_name)", "spawn mode such that most of the environment will run", "# Copyright (c) Facebook, Inc. and its affiliates. # #", "in the root directory of this source tree. # from", "RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C): def __init__(self, config, create_env, create_train_env, create_agent):", "return GymEnvInf(envs, seed) def create_agent(model, n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions) class", "create_env, create_train_env, create_agent) if __name__ == \"__main__\": # We use", "import copy import torch import time import numpy as np", "[] for k in range(n_envs): e = create_gym_env(env_name) e =", "GymEnvInf(envs, seed) def create_agent(model, n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C):", "create_agent): super().__init__(config, create_env, create_train_env, create_agent) if __name__ == \"__main__\": #", "\"time_limit\": 3600, \"lr\": 0.001, \"discount_factor\": 0.95, \"critic_coef\": 1.0, \"entropy_coef\": 0.01,", "This source code is licensed under the MIT license found", "this source tree. 
# from rlstructures import logging from rlstructures.env_wrappers", "gym.wrappers import TimeLimit # We write the 'create_env' and 'create_agent'", "'create_agent' function in the main file to allow these functions", "import torch.multiprocessing as mp mp.set_start_method(\"spawn\") config = { \"env_name\": \"CartPole-v0\",", "env_name=None, max_episode_steps=None, seed=None): envs = [] for k in range(n_envs):", "config = { \"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\": 4, \"max_episode_steps\":", "the main file to allow these functions to be used", "def create_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = [] for k", "from tutorial.tutorial_recurrent_policy.a2c import A2C import gym from gym.wrappers import TimeLimit", "42, \"n_threads\": 4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256, \"time_limit\": 3600, \"lr\":", "\"a2c_coef\": 1.0, \"logdir\": \"./results\", } exp = Experiment(config, create_env, create_train_env,", "pickle when creating the batcher processes def create_gym_env(env_name): return gym.make(env_name)", "= TimeLimit(e, max_episode_steps=max_episode_steps) envs.append(e) return GymEnvInf(envs, seed) def create_agent(model, n_actions=1):", "create_agent(model, n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C): def __init__(self, config,", "code is licensed under the MIT license found in the", "processes def create_gym_env(env_name): return gym.make(env_name) def create_env(n_envs, env_name=None, max_episode_steps=None, seed=None):", "return gym.make(env_name) def create_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = []", "max_episode_steps=None, seed=None): envs = [] for k in range(n_envs): e", "= [] for k in range(n_envs): e = create_gym_env(env_name) e", "TimeLimit # We write the 'create_env' and 'create_agent' function in", "of the environment will run in multiple processes import torch.multiprocessing", 
"torch.multiprocessing as mp mp.set_start_method(\"spawn\") config = { \"env_name\": \"CartPole-v0\", \"a2c_timesteps\":", "source code is licensed under the MIT license found in", "batcher processes def create_gym_env(env_name): return gym.make(env_name) def create_env(n_envs, env_name=None, max_episode_steps=None,", "Facebook, Inc. and its affiliates. # # This source code", "import torch import time import numpy as np import torch.nn.functional", "def __init__(self, config, create_env, create_train_env, create_agent): super().__init__(config, create_env, create_train_env, create_agent)", "licensed under the MIT license found in the # LICENSE", "import time import numpy as np import torch.nn.functional as F", "root directory of this source tree. # from rlstructures import", "file to allow these functions to be used with pickle", "256, \"time_limit\": 3600, \"lr\": 0.001, \"discount_factor\": 0.95, \"critic_coef\": 1.0, \"entropy_coef\":", "GymEnv(envs, seed) def create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = []", "tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import A2C import gym from", "3600, \"lr\": 0.001, \"discount_factor\": 0.95, \"critic_coef\": 1.0, \"entropy_coef\": 0.01, \"a2c_coef\":", "0.01, \"a2c_coef\": 1.0, \"logdir\": \"./results\", } exp = Experiment(config, create_env,", "# # This source code is licensed under the MIT", "# We write the 'create_env' and 'create_agent' function in the", "3, \"n_envs\": 4, \"max_episode_steps\": 100, \"env_seed\": 42, \"n_threads\": 4, \"n_evaluation_threads\":", "import logging from rlstructures.env_wrappers import GymEnv, GymEnvInf from rlstructures.tools import", "processes import torch.multiprocessing as mp mp.set_start_method(\"spawn\") config = { \"env_name\":", "\"logdir\": \"./results\", } exp = Experiment(config, create_env, create_train_env, create_agent) exp.run()", "from rlstructures.tools import weight_init 
import torch.nn as nn import copy", "create_gym_env(env_name): return gym.make(env_name) def create_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs =", "MIT license found in the # LICENSE file in the", "affiliates. # # This source code is licensed under the", "Inc. and its affiliates. # # This source code is", "mode such that most of the environment will run in", "\"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256, \"time_limit\": 3600, \"lr\": 0.001, \"discount_factor\": 0.95,", "n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C): def __init__(self, config, create_env,", "= { \"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\": 4, \"max_episode_steps\": 100,", "# This source code is licensed under the MIT license", "GymEnv, GymEnvInf from rlstructures.tools import weight_init import torch.nn as nn", "import weight_init import torch.nn as nn import copy import torch", "import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import A2C import gym from gym.wrappers", "from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from tutorial.tutorial_recurrent_policy.a2c import A2C import gym", "import A2C import gym from gym.wrappers import TimeLimit # We", "\"n_evaluation_episodes\": 256, \"time_limit\": 3600, \"lr\": 0.001, \"discount_factor\": 0.95, \"critic_coef\": 1.0,", "with pickle when creating the batcher processes def create_gym_env(env_name): return", "\"max_episode_steps\": 100, \"env_seed\": 42, \"n_threads\": 4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256,", "{ \"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3, \"n_envs\": 4, \"max_episode_steps\": 100, \"env_seed\":", "source tree. 
# from rlstructures import logging from rlstructures.env_wrappers import", "\"__main__\": # We use spawn mode such that most of", "create_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs = [] for k in", "np import torch.nn.functional as F from tutorial.tutorial_recurrent_policy.agent import RecurrentAgent from", "write the 'create_env' and 'create_agent' function in the main file", "as mp mp.set_start_method(\"spawn\") config = { \"env_name\": \"CartPole-v0\", \"a2c_timesteps\": 3,", "return RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C): def __init__(self, config, create_env, create_train_env,", "seed) def create_agent(model, n_actions=1): return RecurrentAgent(model=model, n_actions=n_actions) class Experiment(A2C): def", "create_train_env, create_agent): super().__init__(config, create_env, create_train_env, create_agent) if __name__ == \"__main__\":", "4, \"n_evaluation_threads\": 2, \"n_evaluation_episodes\": 256, \"time_limit\": 3600, \"lr\": 0.001, \"discount_factor\":", "2, \"n_evaluation_episodes\": 256, \"time_limit\": 3600, \"lr\": 0.001, \"discount_factor\": 0.95, \"critic_coef\":", "used with pickle when creating the batcher processes def create_gym_env(env_name):", "__name__ == \"__main__\": # We use spawn mode such that", "weight_init import torch.nn as nn import copy import torch import", "\"critic_coef\": 1.0, \"entropy_coef\": 0.01, \"a2c_coef\": 1.0, \"logdir\": \"./results\", } exp", "most of the environment will run in multiple processes import", "return GymEnv(envs, seed) def create_train_env(n_envs, env_name=None, max_episode_steps=None, seed=None): envs =", "k in range(n_envs): e = create_gym_env(env_name) e = TimeLimit(e, max_episode_steps=max_episode_steps)", "<gh_stars>100-1000 # # Copyright (c) Facebook, Inc. and its affiliates." ]
[ "alt import pydeck as pdk import os import glob from", "wc = WordCloud(background_color=\"white\", max_words=100, width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array())", "gesamt: {relations.replace(',','.')}\") def systematik(): #Ranking der meistverwendeten GND-Systematik-Notationen classification =", "title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP", "entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten in DNB-Titeldaten') st.write('Weniger", "im Bestand der DNB befinden. In den Titeldaten ist auch", "in dict: worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\", max_words=100, width=2000, height=800, colormap='tab20')", "('alle', \"Tp - Personen\", \"Tb - Körperschaften\", \"Tg - Geografika\",", "title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} in DNB-Titeldaten')", "als ein Drittel der GND-Entitäten in DNB-Titeldaten wurde in intellektuellen", "sachbegriffe eines auszuwählenden tages der letzten 10 werktage st.header('TOP 100", "intellktuell verknüpften GND-Entitäten in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f:", "radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140, 0], get_line_color=[0,", "get_radius=\"Anzahl\", get_fill_color=[255, 140, 0], get_line_color=[0, 0, 0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer,", "daten_filter = st.select_slider('Wählen Sie ein Datum', options=daten, value=daten[-1]) df =", "werden. Die Art der Verlinkung wird über einen Relationierungscode beschrieben.", "inkl. 
Zeitschriftendatenbank (ZDB), sofern sich Exemplare der Zeitschrift im Bestand", "maschinell erzeugt wurden, aus Fremddaten stammen oder verwaist sind, wurden", "'count': 'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte von GND-Personen') st.markdown('Von allen Personensätzen", "GND-Sätze ab Januar 1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0,", "ein Drittel der GND-Entitäten in DNB-Titeldaten wurde in intellektuellen Erschließungsprozessen", "= alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'),", "meistverwendeten wirkungsorte aller personen in der gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv')", "<NAME> geschrieben. Sie gehören zur Python Community der Deutschen Nationalbibliothek.')", "sind, wurden nicht in die Auswertung einbezogen. Eine detaillierte Auflistung", "[pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool produziert aus dem sehr großen Gesamtabzug", "min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte", "tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f:", "title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_ts_count, use_container_width=True) def", "{uniques_str.replace(',','.')} GND-Entitäten in den DNB-Titeldaten. 
Durchschnittlich {mean} GND-Verknüpfungen pro DNB-Titeldatensatz\")", "index_col=False) st.subheader('Systematik der Sachbegriffe') st.write('Die Entitäten der GND können in", "GND-Verknüpfungen pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f: mean =", "opacity=0.8, get_position='[lon, lat]', pickable=True, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1,", "1972') created_filt = st.slider('Zeitraum', 1972, 2021, (1972,2021), 1) created =", "Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level'] = df.entity.str[2:3] df.entity", "def gnd_top(): #TOP 10 GND-Entitäten in DNB-Titeldaten, nach Satzart gefiltert", "step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE = pdk.ViewState(", "bearing=0 ) scatterplotlayer = pdk.Layer( \"ScatterplotLayer\", df, pickable=True, opacity=0.5, stroked=True,", "uniques, \"Entitäten aus automatischen Prozessen\": auto_entites, \"Entitäten aus Fremddaten\": fremd_entities},", "der Systematik von Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der", "Relationierungscode beschrieben. Hier sind die am häufigsten verwendeten Relationierungscodes zu", "#Anzahl der intellktuell verknüpften GND-Entitäten in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\")", "Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten in den DNB-Titeldaten. 
Durchschnittlich {mean} GND-Verknüpfungen", "5, len(rels), 10, 1) relation_count = alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code',", "st.image(wc.to_array()) def wirkungsorte(): #ranking und karte der meistverwendeten wirkungsorte aller", "uniques = int(f.read()) uniques_str = f'{uniques:,}' #Durchschnittliche Anzahl an GND-Verknüpfungen", "import streamlit_analytics path = os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ): gnd_top_df", "alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\",", "zu {uniques_str.replace(',','.')} GND-Entitäten in den DNB-Titeldaten. Durchschnittlich {mean} GND-Verknüpfungen pro", "sachbegriff_cloud() systematik_ts() dnb = st.beta_container() with dnb: st.header('GND in der", "import streamlit as st import pandas as pd import altair", "gnd_top(): #TOP 10 GND-Entitäten in DNB-Titeldaten, nach Satzart gefiltert if", "systematik(): #Ranking der meistverwendeten GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik')", "pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon, lat]', pickable=True, stroked=True, filled=True, radius_min_pixels=1,", "alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'),", "max_value=len(classification_ts), value=10, step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation',", "Sätze, die in den letzten 365 Tagen angelegt wurden.') newcomer_daten", "= pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer = 
alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count',", "i += 1 def gesamt_entity_count(): #Gesamtzahl der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\",", "[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_ts_count, use_container_width=True)", "Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt, die Karten mit [Deck GL](https://deck.gl/)", "'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf einer Zeitleiste wird die Anzahl", "with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell", "[Streamlit](https://streamlit.io/) geschrieben. Die Skripte sowie die gefilterten CSV-Rohdaten sind auf", "col1, col2 = st.beta_columns(2) i = 1 for index, row", "\"ScatterplotLayer\", df, pickable=True, opacity=0.5, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon,", "for x in files] daten.sort() daten_filter = st.select_slider('Wählen Sie ein", "GND-Sachbegriffe dieses Tages. Die Größe des Begriffes entspricht der Häufigkeit", "auf.') #Balkendiagramm orte_filt = st.slider('Zeige Top …', min_value=3, max_value=len(df), value=10,", "#besondere widgets für einzelne satzarten if satzart == \"Tp -", "der Sachbegriffe (Ts) aufgetragen. 
Die Liste der möglichen Notationen gibt", "angelegt wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer = alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id',", "st.write(f'Durchschnittlich {mean} Verknüpfungen zu {satzart}-Sätzen pro DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich", "dict = df.to_dict(orient='records') worte = {} for record in dict:", "Musikkultur, dargestellt auf einer Karte musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn')", "Anzahl an GND-Verknüpfungen pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f:", "und daraus abgeleitete Zentren der Musikkultur, dargestellt auf einer Karte", "aufgetragen. Die Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt", "der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige Top", "dieses Tages. Die Größe des Begriffes entspricht der Häufigkeit des", "Tool produziert aus dem sehr großen Gesamtabzug (~ 31 GB)", "PICA+ vor. Die Daten werden mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert.", "finden. Die Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt, die Karten mit", "GND-Entitäten können in verschiedenen Katalogisierungsleveln (1-7) angelegt werden. 
Je niedriger", ") st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f: relations =", "ist auch der Tonträger- und Notenbestand des Deutschen Musikarchivs (DMA)", "{relations.replace(',','.')}\") def systematik(): #Ranking der meistverwendeten GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv',", "statistiken in abhängigkeit der satzart if satzart == 'alle': gesamt_entity_count()", "erlaubt Rückschlüsse auf die musikalischen Zentren, wie sie im Bestand", "path = os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame()", "des Dashboards und sehen Sie eine Wordcloud der 100 meistverwendeten", "\"r\") as f: relations = f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten gesamt:", "st.subheader('Systematik') st.write('Die Entitäten der GND können in eine Systematik eingeordnet", "Begriffes entspricht der Häufigkeit des Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten", "st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP 10 der GND-Entitäten, die in den", "GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können mit anderen", "Verknüpfungen mit den Titeldaten der Deutschen Nationalbibliothek (Stand der Daten:", "df.to_dict(orient='records') worte = {} for record in dict: worte.update({record['sachbegriff']:record['count']}) wc", "in der gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name',", "dict: worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\", max_words=100, width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte)", "gespeichert. 
Alle Skripte und Daten stehen unter CC0 Lizenz und", "Datenherkunft\"): st.markdown(''' Datengrundlage ist ein Gesamtabzug der Daten der Gemeinsamen", "der Entitäten, die in den letzten 365 Tagen erstellt wurden", "Sie erhalten die verfügbaren Auswertungen und Statstiken. Verwenden Sie einen", "einbezogen. Eine detaillierte Auflistung der ausgewerteten Felder ist im [GitHub-Repository](https://git.io/JG5vN)", "verwendeten Relationierungscodes zu sehen. Die Auflösung der wichtigsten Codes gibt", "- Körperschaften\", \"Tg - Geografika\", \"Ts - Sachbegriffe\", \"Tu -", "alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'),", "pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer = alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'),", "= load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count',", "title='Anzahl')] ) return st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf(): #zeitverlauf der erstellung", "\"r\") as f: uniques = int(f.read()) uniques_str = f'{uniques:,}' #Durchschnittliche", "letzten Aktualisierung der Daten des Dashboards und sehen Sie eine", "GND können in eine Systematik eingeordnet werden. Hier sind die", "Statstiken. Verwenden Sie einen auf Chromium basierenden Browser.') with st.beta_expander(\"Methodik", "<= 5: with col1: st.write(f'{i}. 
{row[\"name\"]}') elif i > 5:", "alt.X('id', title='Notation', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'),", "sort='-y', title='Bezeichnung'), tooltip = [alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')]", "die Daten, weil Sie dann von qualifizierten Personen erstellt bzw.", "- Geografika\": wirkungsorte_musik() wirkungsorte() elif satzart == \"Ts - Sachbegriffe\":", "0], get_line_color=[0, 0, 0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\":", "alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten und Katalogisierungslevel') else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode(", "verlinkt (»relationiert«) werden. Die Art der Verlinkung wird über einen", "for record in dict: worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\", max_words=100, width=2000,", "- Geografika\", \"Ts - Sachbegriffe\", \"Tu - Werke\", \"Tf -", "DNB-Titeldaten') top_daten = load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten',", "[a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben. 
Sie gehören zur Python Community der", "altair as alt import pydeck as pdk import os import", "get_fill_color=[255, 140, 0], get_line_color=[0, 0, 0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE,", "auswertung der GND-Musikwerke, Musik-Personen und Wikrungsorte und daraus abgeleitete Zentren", "Karte musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren der Musik 1400–2010')", "INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) scatterplotlayer", "Sie links die Satzart, die Sie interessiert, und Sie erhalten", "st import pandas as pd import altair as alt import", "entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level',", "#Durchschnittliche Anzahl an GND-Verknüpfungen pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as", "monatlich erstellten GND-Sätze aufgetragen. 
Die ersten Sätze stammen aus dem", "eines auszuwählenden tages der letzten 10 werktage st.header('TOP 100 Sachbegriffe", "newcomer(): #TOP 10 der Entitäten, die in den letzten 365", "= entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten in DNB-Titeldaten') st.write('Weniger als", "aus den letzten 10 Werktagen vor der letzten Aktualisierung der", "im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def dnb_links(): #GND-Verknüpfungen", "Bestand der DNB repräsentiert sind.') limiter = st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()),", "alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q',", "alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_count, use_container_width=True) def systematik_ts():", "scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von {Anzahl} Personen\"})) def", "st.sidebar.info('Diese Widgets haben die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie <NAME>", "title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} GND-Newcomer') st.write(f'TOP", "Fremddaten\": fremd_entities}, orient = \"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft", "with col2: st.write(f'{i}. {row[\"name\"]}') i += 1 def gesamt_entity_count(): #Gesamtzahl", "[ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben. 
Sie gehören zur Python Community", "Nationalbibliothek (Stand der Daten: Juli 2021). Wählen Sie links die", "Geografika\": wirkungsorte_musik() wirkungsorte() elif satzart == \"Ts - Sachbegriffe\": sachbegriff_cloud()", "get_position='[lon, lat]', pickable=True, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\",", "{mean} GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques,", "options=daten, value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records') worte =", "der GND-Entitäten in DNB-Titeldaten wurde in intellektuellen Erschließungsprozessen vergeben. Jeweils", "Januar 1972') created_filt = st.slider('Zeitraum', 1972, 2021, (1972,2021), 1) created", "pickable=True, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168,", "erstellten GND-Sätze aufgetragen. Die ersten Sätze stammen aus dem Januar", "werden. 
Die Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_filt", "mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen zu {satzart}-Sätzen pro DNB-Titeldatensatz')", "stammen aus dem Januar 1972') created_filt = st.slider('Zeitraum', 1972, 2021,", "pro Monat'), tooltip=['count'] ) return st.altair_chart(created, use_container_width=True) def entities(): #GND-Entitäten", "10 werktage st.header('TOP 100 Sachbegriffe pro Tag') st.write('Wählen Sie ein", "und sehen Sie eine Wordcloud der 100 meistverwendeten GND-Sachbegriffe dieses", "(DMA) sowie der Buch- und Objektbestand des Deutschen Buch- und", "= st.slider('Zeige TOP …', min_value=5, max_value=len(classification_ts), value=10, step=1) classification_ts_count =", "alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True)", "alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro Monat'), tooltip=['count'] ) return", "0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten in DNB-Titeldaten') st.write('Weniger als ein Drittel", "import altair as alt import pydeck as pdk import os", "können in eine Systematik eingeordnet werden. Die Liste der möglichen", "wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer = alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten',", "Top ...', 5, len(rels), 10, 1) relation_count = alt.Chart(rels.nlargest(rels_filt, 'count',", "5: with col2: st.write(f'{i}. 
{row[\"name\"]}') i += 1 def gesamt_entity_count():", "Felder ist im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def", "st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f: relations = f'{int(f.read()):,}'", "eine Systematik eingeordnet werden. Die Liste der möglichen Notationen gibt", "= \"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten in", "aller personen in der gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True)", "die mit Python weiterverarbeitet werden. Das Dashboard ist mit dem", "Dieses Tool produziert aus dem sehr großen Gesamtabzug (~ 31", "der GND und ihrer Verknüpfungen mit den Titeldaten der Deutschen", "mit satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart = st.sidebar.selectbox( \"Über welche GND-Satzart", "entities(): #GND-Entitäten nach Satzart und Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False,", "DNB Titeldaten if satzart == 'alle': #Anzahl GND-Verknüpfungen in DNB-Titeldaten", "wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine personenbezogenen Daten gespeichert. Alle Skripte", "Hier sind die Systematik-Notationen der Sachbegriffe (Ts) aufgetragen. 
Die Liste", "relationen(): #Top 10 der GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen')", "der {limiter}er') col1, col2 = st.beta_columns(2) i = 1 for", "title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE = pdk.ViewState(", "classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die Entitäten der GND können", "title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N',", "st.title('GND-Dashboard') #infoebereich oben with st.beta_container(): st.info('Hier finden Sie statistische Auswertungen", "geschrieben. Sie gehören zur Python Community der Deutschen Nationalbibliothek.') gnd_allgemein", "ersten Sätze stammen aus dem Januar 1972') created_filt = st.slider('Zeitraum',", "gnd_allgemein = st.beta_container() with gnd_allgemein: st.header('GND Statistik allgemein') #allgemeine statistiken", "Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')]", "geschrieben. Die Skripte sowie die gefilterten CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard)", "aufgetragen. 
Die ersten Sätze stammen aus dem Januar 1972') created_filt", "\"<b>{Name}</b><br \\>Wirkungsort von {Anzahl} Personen\"})) def wirkungsorte_musik(): #nach jahrzehnten zwischen", "#ranking und karte der meistverwendeten wirkungsorte aller personen in der", "Katalogisierungslevel') else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity',", "satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart = st.sidebar.selectbox( \"Über welche GND-Satzart möchten", "Drittel wurde in maschinellen Erschließungsprozessen vergeben, ca. ein Drittel stammt", "col1: st.write(f'{i}. {row[\"name\"]}') elif i > 5: with col2: st.write(f'{i}.", "gesamt_entity_count() entities() newcomer() zeitverlauf() relationen() systematik() else: entities() newcomer() #besondere", "Verknüpfungen zu {satzart}-Sätzen pro DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich oben with", "wirkungsorte(): #ranking und karte der meistverwendeten wirkungsorte aller personen in", "DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten',", "gefiltert if satzart == 'alle': st.subheader(f'TOP 10 GND-Entitäten in DNB-Titeldaten')", "tooltip = [alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return", "welche GND-Satzart möchten Sie etwas erfahren?\", ('alle', \"Tp - Personen\",", "in eine Systematik eingeordnet werden. Hier sind die Systematik-Notationen der", "der Daten: Juli 2021). 
Wählen Sie links die Satzart, die", "alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'),", "as pdk import os import glob from wordcloud import WordCloud", "max_zoom=16, bearing=0 ) musiker_scatter = pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon,", "\"Ts - Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb = st.beta_container() with dnb:", "GND-Entitäten in DNB-Titeldaten wurde in intellektuellen Erschließungsprozessen vergeben. Jeweils ca.", "step=1) graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'), alt.Color('Name:N',", "Katalogisierungsleveln (1-7) angelegt werden. Je niedriger das Katalogisierungslevel, desto verlässlicher", "import os import glob from wordcloud import WordCloud import streamlit_analytics", "'alle': #Anzahl GND-Verknüpfungen in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f:", "mit [Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für", "sind die Systematik-Notationen der Sachbegriffe (Ts) aufgetragen. Die Liste der", "die musikalischen Zentren, wie sie im Bestand der DNB repräsentiert", "zwischen Entitäten gesamt: {relations.replace(',','.')}\") def systematik(): #Ranking der meistverwendeten GND-Systematik-Notationen", "as f: links = f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\",", "st.altair_chart(created, use_container_width=True) def entities(): #GND-Entitäten nach Satzart und Katalogisierungslevel df", "werden keine personenbezogenen Daten gespeichert. 
Alle Skripte und Daten stehen", "die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben. Sie gehören", "get_radius=\"norm*50000\", get_fill_color=[50, 168, 92], get_line_color=[39, 71, 51] ) st.pydeck_chart(pdk.Deck( musiker_scatter,", ") st.subheader('Entitäten und Katalogisierungslevel') else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze", "- Werke\", \"Tf - Veranstaltungen\") ) st.sidebar.info('Diese Widgets haben die", "Wikrungsorte und daraus abgeleitete Zentren der Musikkultur, dargestellt auf einer", "return st.altair_chart(created, use_container_width=True) def entities(): #GND-Entitäten nach Satzart und Katalogisierungslevel", "DNB-Titeldaten. Durchschnittlich {mean} GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte", "gehören zur Python Community der Deutschen Nationalbibliothek.') gnd_allgemein = st.beta_container()", "title='Relationierungs-Code', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code',", "st.subheader(f'TOP 10 GND-Entitäten in DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top", "= df.entity.str[:2] if satzart == 'alle': entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)',", "jeweiligen Person auf.') #Balkendiagramm orte_filt = st.slider('Zeige Top …', min_value=3,", "aus Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart',", "relation_count = alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count', 
title='Anzahl'),", "Tagen erstellt wurden if satzart == 'alle': st.subheader(f'TOP 10 GND-Newcomer')", "{} for record in dict: worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\", max_words=100,", "einen Relationierungscode beschrieben. Hier sind die am häufigsten verwendeten Relationierungscodes", ") st.altair_chart(entities, use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f: mean", "gnd_top_df = pd.DataFrame() for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file,", "latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) musiker_scatter = pdk.Layer( \"ScatterplotLayer\",", "pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level'] = df.entity.str[2:3] df.entity = df.entity.str[:2] if", "dieses Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def dnb_links(): #GND-Verknüpfungen in DNB", "import pydeck as pdk import os import glob from wordcloud", "Körperschaften\", \"Tg - Geografika\", \"Ts - Sachbegriffe\", \"Tu - Werke\",", "limiter = st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] ==", "{satzart} Sätze, die in den letzten 365 Tagen angelegt wurden.')", "satzart == 'alle': st.subheader(f'TOP 10 GND-Entitäten in DNB-Titeldaten') top_daten =", "= alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y',", "st.write(f'TOP 10 der {satzart} Sätze, die in den letzten 365", "Titel von Musikern und deren Wirkungszeiten erlaubt Rückschlüsse auf die", "die Anzahl der monatlich erstellten GND-Sätze aufgetragen. 
Die ersten Sätze", "die gefilterten CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. Die Diagramme", "= st.beta_columns(2) i = 1 for index, row in musik_filt.nlargest(10,", "alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity',", "st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf einer Zeitleiste wird die Anzahl der", "im Bestand der DNB repräsentiert sind.') limiter = st.slider('Jahresfilter', min_value=1400,", "satzart = st.sidebar.selectbox( \"Über welche GND-Satzart möchten Sie etwas erfahren?\",", "pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) musiker_scatter = pdk.Layer(", "gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige TOP …', min_value=5, max_value=len(classification_ts),", "pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0, names=['created_at', 'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf", "erstellt wurden if satzart == 'alle': st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP", "> 5: with col2: st.write(f'{i}. 
{row[\"name\"]}') i += 1 def", "st.write('Wählen Sie ein Datum aus den letzten 10 Werktagen vor", "index_col=False) st.subheader('Systematik') st.write('Die Entitäten der GND können in eine Systematik", "if satzart == 'alle': st.subheader(f'TOP 10 GND-Entitäten in DNB-Titeldaten') top_daten", "f: links = f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\")", "der GND-Sätze ab Januar 1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True,", "radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168, 92], get_line_color=[39, 71, 51]", "index_col=None) newcomer = alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name',", "alt.Y('count:Q', title='Sätze pro Monat'), tooltip=['count'] ) return st.altair_chart(created, use_container_width=True) def", "st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown(''' Datengrundlage ist ein Gesamtabzug der Daten", "Werke\", \"Tf - Veranstaltungen\") ) st.sidebar.info('Diese Widgets haben die GitHub-User", "sowie der Buch- und Objektbestand des Deutschen Buch- und Schriftmuseums", "erstellt bzw. 
überprüft wurden.') return st.altair_chart(entity_count, use_container_width=True) def newcomer(): #TOP", "header=0, names=['created_at', 'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf einer Zeitleiste wird", "st.write('Die Entitäten der GND können in eine Systematik eingeordnet werden.", "'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten und Katalogisierungslevel') else: entity_count", "GND-Newcomer') st.write(f'TOP 10 der {satzart} Sätze, die in den letzten", "st.write('Eine Auswertung der veröffentlichten Titel von Musikern und deren Wirkungszeiten", "Die Daten werden mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool", "Skripte sowie die gefilterten CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden.", "line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168, 92], get_line_color=[39, 71, 51] ) st.pydeck_chart(pdk.Deck(", "Personen\", \"Tb - Körperschaften\", \"Tg - Geografika\", \"Ts - Sachbegriffe\",", "get_fill_color=[50, 168, 92], get_line_color=[39, 71, 51] ) st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE,", "alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q',", "Rückschlüsse auf die musikalischen Zentren, wie sie im Bestand der", "Die Daten werden monatlich aktualisiert. 
''') #sidebar mit satzartenfilter st.sidebar.header(\"Satzart", "title='Anzahl')] ) return st.altair_chart(classification_count, use_container_width=True) def systematik_ts(): #Ranking der Systematik", "INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) musiker_scatter", "f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen zu {satzart}-Sätzen pro", "= st.beta_container() with gnd_allgemein: st.header('GND Statistik allgemein') #allgemeine statistiken in", "alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf(): #zeitverlauf der", "alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_count, use_container_width=True) def systematik_ts(): #Ranking der", "height=800, colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def wirkungsorte(): #ranking und karte", "Anzahl der monatlich erstellten GND-Sätze aufgetragen. Die ersten Sätze stammen", "und Katalogisierungslevel') else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'),", "DNB-Titeldaten wurde in intellektuellen Erschließungsprozessen vergeben. Jeweils ca. ein weiteres", "Die Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt, die Karten mit [Deck", "verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine personenbezogenen Daten gespeichert. 
Alle", "0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von", "filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168, 92], get_line_color=[39,", "weisen 782.682 Angaben zum Wirkungsort der jeweiligen Person auf.') #Balkendiagramm", "def systematik(): #Ranking der meistverwendeten GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False)", "Die ersten Sätze stammen aus dem Januar 1972') created_filt =", "Exemplare der Zeitschrift im Bestand der DNB befinden. In den", "satzarten if satzart == \"Tp - Personen\": wirkungsorte() elif satzart", "die Systematik-Notationen der Sachbegriffe (Ts) aufgetragen. Die Liste der möglichen", "Top …', min_value=3, max_value=len(df), value=10, step=1) graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl',", "das Katalogisierungslevel, desto verlässlicher die Daten, weil Sie dann von", "und Objektbestand des Deutschen Buch- und Schriftmuseums (DBSM) nachgewiesen. Der", "pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der Sachbegriffe') st.write('Die Entitäten der GND können", "Eine detaillierte Auflistung der ausgewerteten Felder ist im [GitHub-Repository](https://git.io/JG5vN) dieses", "st.sidebar.header(\"Satzart wählen\") satzart = st.sidebar.selectbox( \"Über welche GND-Satzart möchten Sie", "Sie dann von qualifizierten Personen erstellt bzw. 
überprüft wurden.') return", "entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques, \"Entitäten aus automatischen Prozessen\":", "keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id',", "= load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q',", "(via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik verwenden", "1400–2010') st.write('Eine Auswertung der veröffentlichten Titel von Musikern und deren", "alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip = [alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count',", "10 GND-Newcomer') st.write('TOP 10 der GND-Entitäten, die in den letzten", "\\>Wirkungsort von {Anzahl} Personen\"})) def wirkungsorte_musik(): #nach jahrzehnten zwischen 1400", "os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame() for file", "in DNB-Titeldaten') top_daten = load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N',", "vor der letzten Aktualisierung der Daten des Dashboards und sehen", "alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] )", "in intellektuellen Erschließungsprozessen vergeben. Jeweils ca. 
ein weiteres Drittel wurde", "lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140, 0], get_line_color=[0, 0, 0] ) st.pydeck_chart(pdk.Deck(", "= alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N',", "der Buch- und Objektbestand des Deutschen Buch- und Schriftmuseums (DBSM)", "title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} in DNB-Titeldaten') top_daten =", "gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df def sachbegriff_cloud(): #wordcloud der", "Die Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt =", "Auswertungen der GND und ihrer Verknüpfungen mit den Titeldaten der", "der letzten 10 werktage st.header('TOP 100 Sachbegriffe pro Tag') st.write('Wählen", "import WordCloud import streamlit_analytics path = os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def", "Durchschnittlich {mean} GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\":", "…', min_value=5, max_value=len(classification_ts), value=10, step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode(", "legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE", "def zeitverlauf(): #zeitverlauf der erstellung der GND-Sätze ab Januar 1972", "es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige Top ...', 5, len(rels), 10,", "GND und ihrer Verknüpfungen mit den Titeldaten der Deutschen Nationalbibliothek", "'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'), 
alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q',", "0, 0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort", "\"r\") as f: entities = f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\")", "== limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5,", "…', 5, len(classification), 10, 1) classification_count = alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode(", "in den letzten 365 Tagen angelegt wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv',", "pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level',", "st.header('TOP Wirkungsorte von GND-Personen') st.markdown('Von allen Personensätzen (Tp) weisen 782.682", "Deutschen Nationalbibliothek (Stand der Daten: Juli 2021). Wählen Sie links", "open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f: relations = f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten", "Verlinkung wird über einen Relationierungscode beschrieben. 
Hier sind die am", "sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'),", "sep='\\t', index_col='idn') st.header('Wirkungszentren der Musik 1400–2010') st.write('Eine Auswertung der veröffentlichten", ") st.altair_chart(newcomer, use_container_width=True) def gnd_top(): #TOP 10 GND-Entitäten in DNB-Titeldaten,", "Das Dashboard ist mit dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben. Die Skripte", "der jeweiligen Person auf.') #Balkendiagramm orte_filt = st.slider('Zeige Top …',", "orte_filt = st.slider('Zeige Top …', min_value=3, max_value=len(df), value=10, step=1) graph_count", "= pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0, names=['created_at', 'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung')", "satzart == 'alle': st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP 10 der GND-Entitäten,", "#Balkendiagramm orte_filt = st.slider('Zeige Top …', min_value=3, max_value=len(df), value=10, step=1)", "entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten in DNB-Titeldaten') st.write('Weniger als ein", "with st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown(''' Datengrundlage ist ein Gesamtabzug der", "newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name',", "st.altair_chart(entities, use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f: mean =", "angelegt wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O',", 
"title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten und Katalogisierungslevel') else: entity_count =", "anderen Datensätzen verlinkt (»relationiert«) werden. Die Art der Verlinkung wird", "alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten und Katalogisierungslevel') else:", "(Stand der Daten: Juli 2021). Wählen Sie links die Satzart,", "index_col=None)) return gnd_top_df def sachbegriff_cloud(): #wordcloud der top 100 sachbegriffe", "created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro Monat'), tooltip=['count']", "in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df def sachbegriff_cloud():", "tages der letzten 10 werktage st.header('TOP 100 Sachbegriffe pro Tag')", "die maschinell erzeugt wurden, aus Fremddaten stammen oder verwaist sind,", "st.write(f'{i}. {row[\"name\"]}') elif i > 5: with col2: st.write(f'{i}. {row[\"name\"]}')", "[Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik verwenden wir", "use_container_width=True) def zeitverlauf(): #zeitverlauf der erstellung der GND-Sätze ab Januar", "GND können in eine Systematik eingeordnet werden. 
Die Liste der", "wählen\") satzart = st.sidebar.selectbox( \"Über welche GND-Satzart möchten Sie etwas", "- Veranstaltungen\") ) st.sidebar.info('Diese Widgets haben die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges),", "werktage st.header('TOP 100 Sachbegriffe pro Tag') st.write('Wählen Sie ein Datum", "st.beta_container(): st.info('Hier finden Sie statistische Auswertungen der GND und ihrer", "with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f: auto_entites = int(f.read()) #GND-Entitäten aus", "+= 1 def gesamt_entity_count(): #Gesamtzahl der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\")", "if satzart == 'alle': st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP 10 der", "dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben. Die Skripte sowie die gefilterten CSV-Rohdaten", "GND-Entitäten in den DNB-Titeldaten. Durchschnittlich {mean} GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df", "ihrer Verknüpfungen mit den Titeldaten der Deutschen Nationalbibliothek (Stand der", "== 'alle': entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity',", "qualifizierten Personen erstellt bzw. überprüft wurden.') return st.altair_chart(entity_count, use_container_width=True) def", "Musikarchivs (DMA) sowie der Buch- und Objektbestand des Deutschen Buch-", "in eine Systematik eingeordnet werden. 
Die Liste der möglichen Notationen", "title='Bezeichnung'), tooltip = [alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] )", "musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392,", "else: st.subheader(f'TOP 10 {satzart} GND-Newcomer') st.write(f'TOP 10 der {satzart} Sätze,", "== 'alle': #Anzahl GND-Verknüpfungen in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as", "auch der Tonträger- und Notenbestand des Deutschen Musikarchivs (DMA) sowie", "der letzten Aktualisierung der Daten des Dashboards und sehen Sie", "entspricht der Häufigkeit des Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten =", "verschiedenen Katalogisierungsleveln (1-7) angelegt werden. Je niedriger das Katalogisierungslevel, desto", "ist ein Gesamtabzug der Daten der Gemeinsamen Normadatei (GND) sowie", "können in verschiedenen Katalogisierungsleveln (1-7) angelegt werden. Je niedriger das", "return gnd_top_df def sachbegriff_cloud(): #wordcloud der top 100 sachbegriffe eines", "[niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben. 
Sie gehören zur Python", "use_container_width=True) def gnd_top(): #TOP 10 GND-Entitäten in DNB-Titeldaten, nach Satzart", "Tagen angelegt wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode(", "st.subheader('Datenherkunft der GND-Entitäten in DNB-Titeldaten') st.write('Weniger als ein Drittel der", "Januar 1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0, names=['created_at', 'count'])", "10 der Entitäten, die in den letzten 365 Tagen erstellt", "in maschinellen Erschließungsprozessen vergeben, ca. ein Drittel stammt aus Fremddaten.')", "einen auf Chromium basierenden Browser.') with st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown('''", "entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N' )", "streamlit_analytics path = os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ): gnd_top_df =", "(1-7) angelegt werden. 
Je niedriger das Katalogisierungslevel, desto verlässlicher die", "streamlit as st import pandas as pd import altair as", "links die Satzart, die Sie interessiert, und Sie erhalten die", "Werktagen vor der letzten Aktualisierung der Daten des Dashboards und", "der wichtigsten Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige Top", "der Tonträger- und Notenbestand des Deutschen Musikarchivs (DMA) sowie der", "f: fremd_entities = int(f.read()) #Anzahl der intellktuell verknüpften GND-Entitäten in", "nach Satzart gefiltert if satzart == 'alle': st.subheader(f'TOP 10 GND-Entitäten", "'alle': st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP 10 der GND-Entitäten, die in", "max_value=len(df), value=10, step=1) graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'),", "140, 0], get_line_color=[0, 0, 0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT,", "monatlich aktualisiert. 
''') #sidebar mit satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart =", "Daten des Dashboards und sehen Sie eine Wordcloud der 100", "Nationalbibliothek.') gnd_allgemein = st.beta_container() with gnd_allgemein: st.header('GND Statistik allgemein') #allgemeine", "top 100 sachbegriffe eines auszuwählenden tages der letzten 10 werktage", "der veröffentlichten Titel von Musikern und deren Wirkungszeiten erlaubt Rückschlüsse", "Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige Top ...', 5,", ") st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren", "== \"Tg - Geografika\": wirkungsorte_musik() wirkungsorte() elif satzart == \"Ts", "Erschließungsprozessen vergeben. Jeweils ca. ein weiteres Drittel wurde in maschinellen", "alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f:", "title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N',", "die Sie interessiert, und Sie erhalten die verfügbaren Auswertungen und", "radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168, 92], get_line_color=[39, 71,", "oder verwaist sind, wurden nicht in die Auswertung einbezogen. Eine", "dem sehr großen Gesamtabzug (~ 31 GB) kleinere CSV-Dateien, die", "und Schriftmuseums (DBSM) nachgewiesen. 
Der Gesamtabzug liegt im OCLC-Format PICA+", "alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True) def gnd_top(): #TOP 10 GND-Entitäten", "alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level',", "allgemein') #allgemeine statistiken in abhängigkeit der satzart if satzart ==", "Je niedriger das Katalogisierungslevel, desto verlässlicher die Daten, weil Sie", "31 GB) kleinere CSV-Dateien, die mit Python weiterverarbeitet werden. Das", "classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der Sachbegriffe') st.write('Die Entitäten der", "st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min())", "GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f: entities = f'{int(f.read()):,}' return", "open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f: fremd_entities = int(f.read()) #Anzahl der intellktuell", "von Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der Sachbegriffe') st.write('Die", "Titeldaten der Deutschen Nationalbibliothek (DNB) inkl. 
Zeitschriftendatenbank (ZDB), sofern sich", "Datengrundlage ist ein Gesamtabzug der Daten der Gemeinsamen Normadatei (GND)", "uniques_str = f'{uniques:,}' #Durchschnittliche Anzahl an GND-Verknüpfungen pro DNB-Titeldatensatz with", "gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N',", "finden Sie statistische Auswertungen der GND und ihrer Verknüpfungen mit", "glob from wordcloud import WordCloud import streamlit_analytics path = os.path.dirname(__file__)", "jahrzehnten zwischen 1400 und 2010 gefilterte auswertung der GND-Musikwerke, Musik-Personen", "max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE", "werden. Hier sind die Systematik-Notationen der Sachbegriffe (Ts) aufgetragen. 
Die", "einzelne satzarten if satzart == \"Tp - Personen\": wirkungsorte() elif", "open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f: entities = f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt:", "der Deutschen Nationalbibliothek.') gnd_allgemein = st.beta_container() with gnd_allgemein: st.header('GND Statistik", "abgeleitete Zentren der Musikkultur, dargestellt auf einer Karte musiker_orte =", "DNB-Titeldaten, nach Satzart gefiltert if satzart == 'alle': st.subheader(f'TOP 10", "created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0, names=['created_at', 'count']) st.subheader('Zeitverlauf der", "2021, (1972,2021), 1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze", "Top …', 5, len(classification), 10, 1) classification_count = alt.Chart(classification.nlargest(class_filt, 'count',", "wurden, aus Fremddaten stammen oder verwaist sind, wurden nicht in", "10 der GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können", "ist mit dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben. Die Skripte sowie die", "alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N',", "title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True) else: with", "letzten 10 werktage st.header('TOP 100 Sachbegriffe pro Tag') st.write('Wählen Sie", "[Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). 
Für grundlegende", "dnb = st.beta_container() with dnb: st.header('GND in der Deutschen Nationalbibliothek')", "möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige Top …',", "Systematik eingeordnet werden. Die Liste der möglichen Notationen gibt es", "entities = f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def relationen(): #Top", "= alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro Monat'), tooltip=['count'] )", "den Titeldaten der Deutschen Nationalbibliothek (Stand der Daten: Juli 2021).", "import title import streamlit as st import pandas as pd", "die in den letzten 365 Tagen erstellt wurden if satzart", "worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\", max_words=100, width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte) return", "gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige Top ...', 5, len(rels),", "= alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'),", "Daten: Juli 2021). 
Wählen Sie links die Satzart, die Sie", "open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f: uniques = int(f.read()) uniques_str = f'{uniques:,}'", "zwischen 1400 und 2010 gefilterte auswertung der GND-Musikwerke, Musik-Personen und", "'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'),", ") return st.altair_chart(classification_count, use_container_width=True) def systematik_ts(): #Ranking der Systematik von", "newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'),", "der Sachbegriffe') st.write('Die Entitäten der GND können in eine Systematik", "Entitäten\": uniques, \"Entitäten aus automatischen Prozessen\": auto_entites, \"Entitäten aus Fremddaten\":", "len(rels), 10, 1) relation_count = alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code',", "Deutschen Buch- und Schriftmuseums (DBSM) nachgewiesen. Der Gesamtabzug liegt im", "\"Über welche GND-Satzart möchten Sie etwas erfahren?\", ('alle', \"Tp -", "title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_count,", "werden. Die Daten werden monatlich aktualisiert. 
''') #sidebar mit satzartenfilter", "Sie ein Datum', options=daten, value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict =", "Personen\"})) def wirkungsorte_musik(): #nach jahrzehnten zwischen 1400 und 2010 gefilterte", "with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f: links = f'{int(f.read()):,}' #GND-Entitäten maschinell", "alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count',", "{row[\"name\"]}') i += 1 def gesamt_entity_count(): #Gesamtzahl der GND-Entitäten with", "Schriftmuseums (DBSM) nachgewiesen. Der Gesamtabzug liegt im OCLC-Format PICA+ vor.", "Daten stehen unter CC0 Lizenz und können frei weitergenutzt werden.", "Datum', options=daten, value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records') worte", "(»relationiert«) werden. Die Art der Verlinkung wird über einen Relationierungscode", "opacity=0.5, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255,", "#infoebereich oben with st.beta_container(): st.info('Hier finden Sie statistische Auswertungen der", "interessiert, und Sie erhalten die verfügbaren Auswertungen und Statstiken. 
Verwenden", "Auflistung der ausgewerteten Felder ist im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.')", "= pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon, lat]', pickable=True, stroked=True, filled=True,", "verknüpfte Entitäten\": uniques, \"Entitäten aus automatischen Prozessen\": auto_entites, \"Entitäten aus", "scatterplotlayer = pdk.Layer( \"ScatterplotLayer\", df, pickable=True, opacity=0.5, stroked=True, filled=True, radius_min_pixels=1,", "else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich", "der GND-Entitäten in DNB-Titeldaten') st.write('Weniger als ein Drittel der GND-Entitäten", "häufigsten verwendeten Relationierungscodes zu sehen. Die Auflösung der wichtigsten Codes", "10, 1) relation_count = alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'),", "st.header('TOP 100 Sachbegriffe pro Tag') st.write('Wählen Sie ein Datum aus", "x in files] daten.sort() daten_filter = st.select_slider('Wählen Sie ein Datum',", "'Name', 'count': 'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte von GND-Personen') st.markdown('Von allen", "def relationen(): #Top 10 der GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False)", "newcomer() zeitverlauf() relationen() systematik() else: entities() newcomer() #besondere widgets für", "links = f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as", "aus Fremddaten stammen oder verwaist sind, wurden nicht in die", "Widgets haben die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben.", "tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', 
title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_count, use_container_width=True)", "aktualisiert. ''') #sidebar mit satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart = st.sidebar.selectbox(", "mit anderen Datensätzen verlinkt (»relationiert«) werden. Die Art der Verlinkung", "= glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13] for x in files] daten.sort()", "Daten werden mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool produziert", "1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro Monat'),", "in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f: links = f'{int(f.read()):,}'", "alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} GND-Newcomer')", "alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] )", "\"Tp - Personen\": wirkungsorte() elif satzart == \"Tg - Geografika\":", "satzart == \"Tg - Geografika\": wirkungsorte_musik() wirkungsorte() elif satzart ==", "[GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def dnb_links(): #GND-Verknüpfungen in", "radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168, 92], get_line_color=[39, 71, 51] )", "weitergenutzt werden. Die Daten werden monatlich aktualisiert. 
''') #sidebar mit", "st.slider('Zeige Top …', 5, len(classification), 10, 1) classification_count = alt.Chart(classification.nlargest(class_filt,", "#GND-Verknüpfungen in DNB Titeldaten if satzart == 'alle': #Anzahl GND-Verknüpfungen", "streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame() for file in", "alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True) def gnd_top(): #TOP", "st.subheader('Systematik der Sachbegriffe') st.write('Die Entitäten der GND können in eine", "sehen Sie eine Wordcloud der 100 meistverwendeten GND-Sachbegriffe dieses Tages.", "latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) scatterplotlayer = pdk.Layer( \"ScatterplotLayer\",", "load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'),", "TOP …', min_value=5, max_value=len(classification_ts), value=10, step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count',", "load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'),", "(Ts) aufgetragen. 
Die Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).')", "DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques, \"Entitäten aus automatischen", "zum Wirkungsort der jeweiligen Person auf.') #Balkendiagramm orte_filt = st.slider('Zeige", "def gesamt_entity_count(): #Gesamtzahl der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f:", "wichtigsten Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige Top ...',", "Musik 1400–2010') st.write('Eine Auswertung der veröffentlichten Titel von Musikern und", "Gesamtabzug der Daten der Gemeinsamen Normadatei (GND) sowie der Titeldaten", "782.682 Angaben zum Wirkungsort der jeweiligen Person auf.') #Balkendiagramm orte_filt", "pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records') worte = {} for record in", "stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50, 168, 92],", "Aktualisierung der Daten des Dashboards und sehen Sie eine Wordcloud", "with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f: fremd_entities = int(f.read()) #Anzahl der", "Sätze stammen aus dem Januar 1972') created_filt = st.slider('Zeitraum', 1972,", "keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')]", "sind.') limiter = st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt']", "newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer = alt.Chart(newcomer_daten).mark_bar().encode( 
alt.X('gnd_id', title='Entitäten', sort='-y'),", "CSV-Dateien, die mit Python weiterverarbeitet werden. Das Dashboard ist mit", "gnd_top_df def sachbegriff_cloud(): #wordcloud der top 100 sachbegriffe eines auszuwählenden", "#allgemeine statistiken in abhängigkeit der satzart if satzart == 'alle':", "Auflösung der wichtigsten Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige", "Python weiterverarbeitet werden. Das Dashboard ist mit dem Python-Framework [Streamlit](https://streamlit.io/)", "10 {satzart} GND-Newcomer') st.write(f'TOP 10 der {satzart} Sätze, die in", "musikalischen Zentren, wie sie im Bestand der DNB repräsentiert sind.')", "\"Tp - Personen\", \"Tb - Körperschaften\", \"Tg - Geografika\", \"Ts", "Bestand der DNB befinden. In den Titeldaten ist auch der", "= st.beta_container() with dnb: st.header('GND in der Deutschen Nationalbibliothek') gnd_top()", "created_filt = st.slider('Zeitraum', 1972, 2021, (1972,2021), 1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode(", "record in dict: worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\", max_words=100, width=2000, height=800,", "Zeitleiste wird die Anzahl der monatlich erstellten GND-Sätze aufgetragen. 
Die", "else: st.subheader(f'TOP 10 {satzart} in DNB-Titeldaten') top_daten = load_gnd_top_daten('top10') gnd_top", "wirkungsorte() elif satzart == \"Ts - Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb", "tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in Satzart {satzart}')", "use_container_width=True) def newcomer(): #TOP 10 der Entitäten, die in den", "open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f: links = f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft", "= alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'),", "in den letzten 365 Tagen angelegt wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10')", "sowie der Titeldaten der Deutschen Nationalbibliothek (DNB) inkl. Zeitschriftendatenbank (ZDB),", "as f: auto_entites = int(f.read()) #GND-Entitäten aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\",", "erhalten die verfügbaren Auswertungen und Statstiken. Verwenden Sie einen auf", "'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in Satzart {satzart}') st.write('Alle", "GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques, \"Entitäten", "Normadatei (GND) sowie der Titeldaten der Deutschen Nationalbibliothek (DNB) inkl.", "den letzten 365 Tagen angelegt wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer", "kleinere CSV-Dateien, die mit Python weiterverarbeitet werden. 
Das Dashboard ist", "{entities.replace(',','.')}\") def relationen(): #Top 10 der GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv',", "in DNB-Titeldaten, nach Satzart gefiltert if satzart == 'alle': st.subheader(f'TOP", "1) relation_count = alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count',", "satzart == 'alle': gesamt_entity_count() entities() newcomer() zeitverlauf() relationen() systematik() else:", "musik_filt, opacity=0.8, get_position='[lon, lat]', pickable=True, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100,", "Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f: fremd_entities = int(f.read()) #Anzahl", "pd.DataFrame() for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None)) return", "Monat'), tooltip=['count'] ) return st.altair_chart(created, use_container_width=True) def entities(): #GND-Entitäten nach", "if satzart == 'alle': entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro", "GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben. Sie gehören zur", "st.slider('Zeige Top ...', 5, len(rels), 10, 1) relation_count = alt.Chart(rels.nlargest(rels_filt,", "with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f: uniques = int(f.read()) uniques_str =", "open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene", "'norm').iterrows(): if i <= 5: with col1: st.write(f'{i}. 
{row[\"name\"]}') elif", "Wirkungszentren der {limiter}er') col1, col2 = st.beta_columns(2) i = 1", "f'{uniques:,}' #Durchschnittliche Anzahl an GND-Verknüpfungen pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\")", "= st.slider('Zeige Top …', 5, len(classification), 10, 1) classification_count =", "dem Januar 1972') created_filt = st.slider('Zeitraum', 1972, 2021, (1972,2021), 1)", "as f: uniques = int(f.read()) uniques_str = f'{uniques:,}' #Durchschnittliche Anzahl", "des Deutschen Buch- und Schriftmuseums (DBSM) nachgewiesen. Der Gesamtabzug liegt", "with dnb: st.header('GND in der Deutschen Nationalbibliothek') gnd_top() dnb_links() streamlit_analytics.stop_tracking()", "satzart == \"Ts - Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb = st.beta_container()", "entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level',", "der {satzart} Sätze, die in den letzten 365 Tagen angelegt", "{row[\"name\"]}') elif i > 5: with col2: st.write(f'{i}. 
{row[\"name\"]}') i", "sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count, use_container_width=True) with", "def systematik_ts(): #Ranking der Systematik von Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv',", "Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13] for x in", "Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige", "pdk.Layer( \"ScatterplotLayer\", df, pickable=True, opacity=0.5, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1,", "#Top 10 der GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze", "in die Auswertung einbezogen. Eine detaillierte Auflistung der ausgewerteten Felder", "Sie etwas erfahren?\", ('alle', \"Tp - Personen\", \"Tb - Körperschaften\",", "= pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques, \"Entitäten aus automatischen Prozessen\": auto_entites,", "der GND-Musikwerke, Musik-Personen und Wikrungsorte und daraus abgeleitete Zentren der", "map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von {Anzahl} Personen\"})) def wirkungsorte_musik(): #nach", "möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige TOP …',", "title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf(): #zeitverlauf", "wirkungsorte aller personen in der gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'],", "gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y',", 
"karte der meistverwendeten wirkungsorte aller personen in der gnd df", "letzten 365 Tagen erstellt wurden if satzart == 'alle': st.subheader(f'TOP", "= alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code',", ") else: st.subheader(f'TOP 10 {satzart} GND-Newcomer') st.write(f'TOP 10 der {satzart}", "{satzart}') st.write('Alle GND-Entitäten können in verschiedenen Katalogisierungsleveln (1-7) angelegt werden.", "alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip(", "{satzart}-Sätzen pro DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich oben with st.beta_container(): st.info('Hier", "deren Wirkungszeiten erlaubt Rückschlüsse auf die musikalischen Zentren, wie sie", "st.header('GND Statistik allgemein') #allgemeine statistiken in abhängigkeit der satzart if", "i > 5: with col2: st.write(f'{i}. {row[\"name\"]}') i += 1", "GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die Entitäten der GND", "pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name', 'count': 'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte", "tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten und", "eingeordnet werden. 
Die Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).')", "Community der Deutschen Nationalbibliothek.') gnd_allgemein = st.beta_container() with gnd_allgemein: st.header('GND", "f: auto_entites = int(f.read()) #GND-Entitäten aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\")", "Satzart gefiltert if satzart == 'alle': st.subheader(f'TOP 10 GND-Entitäten in", "int(f.read()) uniques_str = f'{uniques:,}' #Durchschnittliche Anzahl an GND-Verknüpfungen pro DNB-Titeldatensatz", "daten = [x[-23:-13] for x in files] daten.sort() daten_filter =", "den DNB-Titeldaten. Durchschnittlich {mean} GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell", "filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140, 0],", "GND-Entitäten in DNB-Titeldaten') st.write('Weniger als ein Drittel der GND-Entitäten in", "aus dem sehr großen Gesamtabzug (~ 31 GB) kleinere CSV-Dateien,", "in den letzten 365 Tagen erstellt wurden if satzart ==", "eingeordnet werden. Hier sind die Systematik-Notationen der Sachbegriffe (Ts) aufgetragen.", "df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level'] = df.entity.str[2:3] df.entity =", "für einzelne satzarten if satzart == \"Tp - Personen\": wirkungsorte()", "weil Sie dann von qualifizierten Personen erstellt bzw. überprüft wurden.')", "title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten und Katalogisierungslevel')", "Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'),", "Objektbestand des Deutschen Buch- und Schriftmuseums (DBSM) nachgewiesen. 
Der Gesamtabzug", "systematik_ts() dnb = st.beta_container() with dnb: st.header('GND in der Deutschen", "alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y',", "keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip", "col2: st.write(f'{i}. {row[\"name\"]}') i += 1 def gesamt_entity_count(): #Gesamtzahl der", "wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt, die Karten mit [Deck GL](https://deck.gl/) (via", "df.entity = df.entity.str[:2] if satzart == 'alle': entity_count = alt.Chart(df).mark_bar().encode(", "überprüft wurden.') return st.altair_chart(entity_count, use_container_width=True) def newcomer(): #TOP 10 der", "und 2010 gefilterte auswertung der GND-Musikwerke, Musik-Personen und Wikrungsorte und", "gibt es [hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige Top …', 5, len(classification),", "wurden.') return st.altair_chart(entity_count, use_container_width=True) def newcomer(): #TOP 10 der Entitäten,", "get_line_color=[0, 0, 0] ) st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br", "pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count',", "repräsentiert sind.') limiter = st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt=", "am häufigsten verwendeten Relationierungscodes zu sehen. Die Auflösung der wichtigsten", "sich Exemplare der Zeitschrift im Bestand der DNB befinden. 
In", "GND-Satzart möchten Sie etwas erfahren?\", ('alle', \"Tp - Personen\", \"Tb", "= pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name', 'count': 'Anzahl'}, inplace=True) st.header('TOP", "= pd.DataFrame() for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None))", "sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer,", "grundlegende Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine personenbezogenen Daten", "ausgewerteten Felder ist im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True)", "die Karten mit [Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit", "i = 1 for index, row in musik_filt.nlargest(10, 'norm').iterrows(): if", "Sachbegriffe\", \"Tu - Werke\", \"Tf - Veranstaltungen\") ) st.sidebar.info('Diese Widgets", "der Titeldaten der Deutschen Nationalbibliothek (DNB) inkl. 
Zeitschriftendatenbank (ZDB), sofern", "alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in Satzart {satzart}') st.write('Alle GND-Entitäten können", "musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren der Musik 1400–2010') st.write('Eine", "der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f: entities = f'{int(f.read()):,}'", "#Ranking der meistverwendeten GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die", "in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f: uniques = int(f.read())", "und Wikrungsorte und daraus abgeleitete Zentren der Musikkultur, dargestellt auf", "\"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon, lat]', pickable=True, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100,", "alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen, die maschinell erzeugt wurden,", "\"r\") as f: links = f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft with", "frei weitergenutzt werden. Die Daten werden monatlich aktualisiert. 
''') #sidebar", "for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df", "title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in Satzart", "title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')]", "st.beta_container() with gnd_allgemein: st.header('GND Statistik allgemein') #allgemeine statistiken in abhängigkeit", "title='Sätze pro Monat'), tooltip=['count'] ) return st.altair_chart(created, use_container_width=True) def entities():", "st.write('GND-Datensätze können mit anderen Datensätzen verlinkt (»relationiert«) werden. Die Art", "as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen zu", "Tonträger- und Notenbestand des Deutschen Musikarchivs (DMA) sowie der Buch-", "GB) kleinere CSV-Dateien, die mit Python weiterverarbeitet werden. 
Das Dashboard", "value=10, step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'),", "with st.beta_container(): st.info('Hier finden Sie statistische Auswertungen der GND und", "wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def wirkungsorte(): #ranking und karte der meistverwendeten", "Entitäten gesamt: {relations.replace(',','.')}\") def systematik(): #Ranking der meistverwendeten GND-Systematik-Notationen classification", "pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren der Musik 1400–2010') st.write('Eine Auswertung der", "st.write('Weniger als ein Drittel der GND-Entitäten in DNB-Titeldaten wurde in", "== \"Tp - Personen\": wirkungsorte() elif satzart == \"Tg -", "alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y',", "der GND-Datensatzerstellung') st.write('Auf einer Zeitleiste wird die Anzahl der monatlich", "index_col=False, names=['entity','count']) df['level'] = df.entity.str[2:3] df.entity = df.entity.str[:2] if satzart", "WordCloud import streamlit_analytics path = os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ):", "Tag') st.write('Wählen Sie ein Datum aus den letzten 10 Werktagen", "stammt aus Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'),", "value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE =", "musiker_scatter = pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon, lat]', 
pickable=True, stroked=True,", "und Notenbestand des Deutschen Musikarchivs (DMA) sowie der Buch- und", "der meistverwendeten wirkungsorte aller personen in der gnd df =", "systematik() else: entities() newcomer() #besondere widgets für einzelne satzarten if", "Dashboards und sehen Sie eine Wordcloud der 100 meistverwendeten GND-Sachbegriffe", "\"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten in DNB-Titeldaten')", "title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True)", "zur Python Community der Deutschen Nationalbibliothek.') gnd_allgemein = st.beta_container() with", "statistische Auswertungen der GND und ihrer Verknüpfungen mit den Titeldaten", "der Häufigkeit des Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13]", "und Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level'] = df.entity.str[2:3]", "pro DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich oben with st.beta_container(): st.info('Hier finden", "...', 5, len(rels), 10, 1) relation_count = alt.Chart(rels.nlargest(rels_filt, 'count', keep='all')).mark_bar().encode(", "gesamt_entity_count(): #Gesamtzahl der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f: entities", "zu sehen. 
Die Auflösung der wichtigsten Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).')", "'alle': entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'),", "st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def relationen(): #Top 10 der GND-Relationierungscodes rels", "st.slider('Zeige TOP …', min_value=5, max_value=len(classification_ts), value=10, step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt,", "allen Personensätzen (Tp) weisen 782.682 Angaben zum Wirkungsort der jeweiligen", "row in musik_filt.nlargest(10, 'norm').iterrows(): if i <= 5: with col1:", "satzart == 'alle': entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'),", "use_container_width=True) def dnb_links(): #GND-Verknüpfungen in DNB Titeldaten if satzart ==", "mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden", "100 meistverwendeten GND-Sachbegriffe dieses Tages. 
Die Größe des Begriffes entspricht", "= st.slider('Zeige Top ...', 5, len(rels), 10, 1) relation_count =", "pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die Entitäten der GND können in eine", "Katalogisierungslevel, desto verlässlicher die Daten, weil Sie dann von qualifizierten", "\"Ts - Sachbegriffe\", \"Tu - Werke\", \"Tf - Veranstaltungen\") )", "alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N',", "Chromium basierenden Browser.') with st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown(''' Datengrundlage ist", "use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f: relations = f'{int(f.read()):,}' st.write(f\"Relationen", "Erschließungsprozessen vergeben, ca. ein Drittel stammt aus Fremddaten.') entities =", "#Anzahl GND-Verknüpfungen in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f: links", "Für grundlegende Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine personenbezogenen", "Zeitschriftendatenbank (ZDB), sofern sich Exemplare der Zeitschrift im Bestand der", "aus dem Januar 1972') created_filt = st.slider('Zeitraum', 1972, 2021, (1972,2021),", "= pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) scatterplotlayer =", "Daten werden monatlich aktualisiert. 
''') #sidebar mit satzartenfilter st.sidebar.header(\"Satzart wählen\")", "title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] )", "zeitverlauf() relationen() systematik() else: entities() newcomer() #besondere widgets für einzelne", "weiteres Drittel wurde in maschinellen Erschließungsprozessen vergeben, ca. ein Drittel", "oben with st.beta_container(): st.info('Hier finden Sie statistische Auswertungen der GND", ") return st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf(): #zeitverlauf der erstellung der", "Wirkungszeiten erlaubt Rückschlüsse auf die musikalischen Zentren, wie sie im", "daten.sort() daten_filter = st.select_slider('Wählen Sie ein Datum', options=daten, value=daten[-1]) df", "max_words=100, width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def wirkungsorte(): #ranking", "der DNB repräsentiert sind.') limiter = st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900),", "Skripte und Daten stehen unter CC0 Lizenz und können frei", "= f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def relationen(): #Top 10", "wurde in maschinellen Erschließungsprozessen vergeben, ca. ein Drittel stammt aus", "…', min_value=3, max_value=len(df), value=10, step=1) graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode(", "des Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13] for x", "def load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame() for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df", "1 def gesamt_entity_count(): #Gesamtzahl der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as", "st.write(f'{i}. 
{row[\"name\"]}') i += 1 def gesamt_entity_count(): #Gesamtzahl der GND-Entitäten", "st.slider('Zeige Top …', min_value=3, max_value=len(df), value=10, step=1) graph_count = alt.Chart(df.nlargest(orte_filt,", "etwas erfahren?\", ('alle', \"Tp - Personen\", \"Tb - Körperschaften\", \"Tg", "ein Datum aus den letzten 10 Werktagen vor der letzten", "die Auswertung einbezogen. Eine detaillierte Auflistung der ausgewerteten Felder ist", "longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) scatterplotlayer = pdk.Layer( \"ScatterplotLayer\", df,", "es [hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige Top …', 5, len(classification), 10,", "alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'),", "die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/).", "letzten 365 Tagen angelegt wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer", "Entitäten der GND können in eine Systematik eingeordnet werden. Die", "df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name', 'count': 'Anzahl'}, inplace=True)", "den Titeldaten ist auch der Tonträger- und Notenbestand des Deutschen", "veröffentlichten Titel von Musikern und deren Wirkungszeiten erlaubt Rückschlüsse auf", "verwaist sind, wurden nicht in die Auswertung einbezogen. 
Eine detaillierte", "title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen, die maschinell erzeugt", "pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques, \"Entitäten aus automatischen Prozessen\": auto_entites, \"Entitäten", "10 GND-Entitäten in DNB-Titeldaten, nach Satzart gefiltert if satzart ==", "title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True) def gnd_top(): #TOP 10 GND-Entitäten in", "dnb_links(): #GND-Verknüpfungen in DNB Titeldaten if satzart == 'alle': #Anzahl", "classification_count = alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'), alt.Y('count', title='Anzahl'),", "können in eine Systematik eingeordnet werden. Hier sind die Systematik-Notationen", "in files] daten.sort() daten_filter = st.select_slider('Wählen Sie ein Datum', options=daten,", "st.beta_columns(2) i = 1 for index, row in musik_filt.nlargest(10, 'norm').iterrows():", "\"Tg - Geografika\": wirkungsorte_musik() wirkungsorte() elif satzart == \"Ts -", "title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in Satzart {satzart}') st.write('Alle GND-Entitäten können in", "wirkungsorte_musik() wirkungsorte() elif satzart == \"Ts - Sachbegriffe\": sachbegriff_cloud() systematik_ts()", "vergeben, ca. ein Drittel stammt aus Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode(", "Datensätzen verlinkt (»relationiert«) werden. 
Die Art der Verlinkung wird über", "zoom=4.5, max_zoom=16, bearing=0 ) scatterplotlayer = pdk.Layer( \"ScatterplotLayer\", df, pickable=True,", "as alt import pydeck as pdk import os import glob", "10 Wirkungszentren der {limiter}er') col1, col2 = st.beta_columns(2) i =", "st.subheader('Entitäten und Katalogisierungslevel') else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro", "pd import altair as alt import pydeck as pdk import", "df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name', 'count': 'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte von", "== 'alle': st.subheader(f'TOP 10 GND-Entitäten in DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv',", "with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f: entities = f'{int(f.read()):,}' return st.write(f\"GND-Entitäten", "return st.altair_chart(classification_count, use_container_width=True) def systematik_ts(): #Ranking der Systematik von Ts-Sätzen", "f: entities = f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def relationen():", "werden. Je niedriger das Katalogisierungslevel, desto verlässlicher die Daten, weil", "Sie ein Datum aus den letzten 10 Werktagen vor der", "get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140, 0], get_line_color=[0, 0, 0] )", "5: with col1: st.write(f'{i}. 
{row[\"name\"]}') elif i > 5: with", "der ausgewerteten Felder ist im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.') st.altair_chart(gnd_top,", "Notenbestand des Deutschen Musikarchivs (DMA) sowie der Buch- und Objektbestand", "71, 51] ) st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP", "GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik", "= int(f.read()) #Anzahl der intellktuell verknüpften GND-Entitäten in DNB-Titeldaten with", "st.altair_chart(newcomer, use_container_width=True) def gnd_top(): #TOP 10 GND-Entitäten in DNB-Titeldaten, nach", "Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine personenbezogenen Daten gespeichert.", "der GND können in eine Systematik eingeordnet werden. 
Hier sind", "title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\",", "gnd_allgemein: st.header('GND Statistik allgemein') #allgemeine statistiken in abhängigkeit der satzart", "basierenden Browser.') with st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown(''' Datengrundlage ist ein", "sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')]", "keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count',", "title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N',", "from matplotlib.pyplot import title import streamlit as st import pandas", "10 der {satzart} Sätze, die in den letzten 365 Tagen", "Häufigkeit des Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13] for", "files] daten.sort() daten_filter = st.select_slider('Wählen Sie ein Datum', options=daten, value=daten[-1])", "DNB repräsentiert sind.') limiter = st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10)", "alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in", "= os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache def load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame() for", "sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), 
alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')]", "if satzart == 'alle': gesamt_entity_count() entities() newcomer() zeitverlauf() relationen() systematik()", "Deutschen Nationalbibliothek (DNB) inkl. Zeitschriftendatenbank (ZDB), sofern sich Exemplare der", "Die Art der Verlinkung wird über einen Relationierungscode beschrieben. Hier", "\"Tg - Geografika\", \"Ts - Sachbegriffe\", \"Tu - Werke\", \"Tf", "in DNB Titeldaten if satzart == 'alle': #Anzahl GND-Verknüpfungen in", "[x[-23:-13] for x in files] daten.sort() daten_filter = st.select_slider('Wählen Sie", "der Daten des Dashboards und sehen Sie eine Wordcloud der", "Auswertung der veröffentlichten Titel von Musikern und deren Wirkungszeiten erlaubt", "365 Tagen angelegt wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer =", "from wordcloud import WordCloud import streamlit_analytics path = os.path.dirname(__file__) streamlit_analytics.start_tracking()", "der Zeitschrift im Bestand der DNB befinden. In den Titeldaten", "Die Auflösung der wichtigsten Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt =", "ist im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def dnb_links():", "Zeitschrift im Bestand der DNB befinden. 
In den Titeldaten ist", "Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der Sachbegriffe') st.write('Die Entitäten", "= alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'),", "tooltip=['count'] ) return st.altair_chart(created, use_container_width=True) def entities(): #GND-Entitäten nach Satzart", "des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool produziert aus dem sehr", "pandas as pd import altair as alt import pydeck as", "open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen", "st.altair_chart(entity_count, use_container_width=True) def newcomer(): #TOP 10 der Entitäten, die in", "Entitäten der GND können in eine Systematik eingeordnet werden. Hier", "st.info('Hier finden Sie statistische Auswertungen der GND und ihrer Verknüpfungen", "personenbezogenen Daten gespeichert. 
Alle Skripte und Daten stehen unter CC0", "wirkungsorte() elif satzart == \"Tg - Geografika\": wirkungsorte_musik() wirkungsorte() elif", "rels_filt = st.slider('Zeige Top ...', 5, len(rels), 10, 1) relation_count", "auf die musikalischen Zentren, wie sie im Bestand der DNB", "title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} GND-Newcomer') st.write(f'TOP 10 der", "pro Tag') st.write('Wählen Sie ein Datum aus den letzten 10", "= {} for record in dict: worte.update({record['sachbegriff']:record['count']}) wc = WordCloud(background_color=\"white\",", "#Gesamtzahl der GND-Entitäten with open(f\"{path}/../stats/gnd_entity_count.csv\", \"r\") as f: entities =", "Prozessen\": auto_entites, \"Entitäten aus Fremddaten\": fremd_entities}, orient = \"index\").reset_index() entity_df", "alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip =", "1972, 2021, (1972,2021), 1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q',", "erfahren?\", ('alle', \"Tp - Personen\", \"Tb - Körperschaften\", \"Tg -", "\"Entitäten aus automatischen Prozessen\": auto_entites, \"Entitäten aus Fremddaten\": fremd_entities}, orient", "names=['entity','count']) df['level'] = df.entity.str[2:3] df.entity = df.entity.str[:2] if satzart ==", "der DNB befinden. In den Titeldaten ist auch der Tonträger-", "[Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. 
Die Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt, die", "entities() newcomer() #besondere widgets für einzelne satzarten if satzart ==", "alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'),", "tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von {Anzahl} Personen\"})) def wirkungsorte_musik(): #nach jahrzehnten", ") st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von {Anzahl}", "detaillierte Auflistung der ausgewerteten Felder ist im [GitHub-Repository](https://git.io/JG5vN) dieses Dashboards", "wordcloud import WordCloud import streamlit_analytics path = os.path.dirname(__file__) streamlit_analytics.start_tracking() @st.cache", "alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q',", "as f: relations = f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten gesamt: {relations.replace(',','.')}\")", "st.sidebar.selectbox( \"Über welche GND-Satzart möchten Sie etwas erfahren?\", ('alle', \"Tp", "df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records') worte = {} for", "st.beta_container() with dnb: st.header('GND in der Deutschen Nationalbibliothek') gnd_top() dnb_links()", "title='Anzahl'), alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as", "musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE = pdk.ViewState( 
latitude=50.67877877706058,", "title='Code')] ) st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f: relations", "Die Skripte sowie die gefilterten CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu", "radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140, 0], get_line_color=[0, 0,", "Sie einen auf Chromium basierenden Browser.') with st.beta_expander(\"Methodik und Datenherkunft\"):", "Die Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_filt =", "glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df def sachbegriff_cloud(): #wordcloud", "title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip(", "== 'alle': st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP 10 der GND-Entitäten, die", "92], get_line_color=[39, 71, 51] ) st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\":", "\"Tf - Veranstaltungen\") ) st.sidebar.info('Diese Widgets haben die GitHub-User [niko2342](https://github.com/niko2342/),", "f: uniques = int(f.read()) uniques_str = f'{uniques:,}' #Durchschnittliche Anzahl an", "gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df def sachbegriff_cloud(): #wordcloud der top 100", "100 Sachbegriffe pro Tag') st.write('Wählen Sie ein Datum aus den", "GND-Datensatzerstellung') st.write('Auf einer Zeitleiste wird die Anzahl der monatlich erstellten", "ein Datum', options=daten, value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records')", "Buch- und Objektbestand des Deutschen Buch- und 
Schriftmuseums (DBSM) nachgewiesen.", "der Gemeinsamen Normadatei (GND) sowie der Titeldaten der Deutschen Nationalbibliothek", "st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16,", "Python-Framework [Streamlit](https://streamlit.io/) geschrieben. Die Skripte sowie die gefilterten CSV-Rohdaten sind", "Sachbegriffe (Ts) aufgetragen. Die Liste der möglichen Notationen gibt es", "index_col='created_at', parse_dates=True, header=0, names=['created_at', 'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf einer", "st.altair_chart(gnd_top, use_container_width=True) def dnb_links(): #GND-Verknüpfungen in DNB Titeldaten if satzart", "Satzart und Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level'] =", "Systematik eingeordnet werden. Hier sind die Systematik-Notationen der Sachbegriffe (Ts)", "Gesamtabzug (~ 31 GB) kleinere CSV-Dateien, die mit Python weiterverarbeitet", "\"r\") as f: fremd_entities = int(f.read()) #Anzahl der intellktuell verknüpften", "können mit anderen Datensätzen verlinkt (»relationiert«) werden. Die Art der", "title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP", "in den DNB-Titeldaten. Durchschnittlich {mean} GND-Verknüpfungen pro DNB-Titeldatensatz\") entity_df =", "Dabei werden keine personenbezogenen Daten gespeichert. 
Alle Skripte und Daten", "alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True)", "\"Tb - Körperschaften\", \"Tg - Geografika\", \"Ts - Sachbegriffe\", \"Tu", "title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True) def gnd_top(): #TOP 10", "musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren der {limiter}er')", "in abhängigkeit der satzart if satzart == 'alle': gesamt_entity_count() entities()", "#Ranking der Systematik von Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik", "title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader('Entitäten", "satzart if satzart == 'alle': gesamt_entity_count() entities() newcomer() zeitverlauf() relationen()", "werden monatlich aktualisiert. ''') #sidebar mit satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart", "maschinellen Erschließungsprozessen vergeben, ca. 
ein Drittel stammt aus Fremddaten.') entities", "dargestellt auf einer Karte musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren", "{limiter}er') col1, col2 = st.beta_columns(2) i = 1 for index,", "#nach jahrzehnten zwischen 1400 und 2010 gefilterte auswertung der GND-Musikwerke,", "tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen, die maschinell", "(ZDB), sofern sich Exemplare der Zeitschrift im Bestand der DNB", "Personensätzen (Tp) weisen 782.682 Angaben zum Wirkungsort der jeweiligen Person", "Juli 2021). Wählen Sie links die Satzart, die Sie interessiert,", "168, 92], get_line_color=[39, 71, 51] ) st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT,", "auto_entites, \"Entitäten aus Fremddaten\": fremd_entities}, orient = \"index\").reset_index() entity_df =", "Die Größe des Begriffes entspricht der Häufigkeit des Sachbegriffs.') files", "Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige Top …', 5,", "class_ts_filt = st.slider('Zeige TOP …', min_value=5, max_value=len(classification_ts), value=10, step=1) classification_ts_count", "365 Tagen erstellt wurden if satzart == 'alle': st.subheader(f'TOP 10", "tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True) def", "eine Wordcloud der 100 meistverwendeten GND-Sachbegriffe dieses Tages. Die Größe", "sofern sich Exemplare der Zeitschrift im Bestand der DNB befinden.", "produziert aus dem sehr großen Gesamtabzug (~ 31 GB) kleinere", "Wordcloud der 100 meistverwendeten GND-Sachbegriffe dieses Tages. 
Die Größe des", "for index, row in musik_filt.nlargest(10, 'norm').iterrows(): if i <= 5:", "Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool produziert aus dem sehr großen", "= alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N',", "den letzten 365 Tagen erstellt wurden if satzart == 'alle':", "alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count, use_container_width=True)", "Daten der Gemeinsamen Normadatei (GND) sowie der Titeldaten der Deutschen", "gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name', 'count': 'Anzahl'},", "Fremddaten stammen oder verwaist sind, wurden nicht in die Auswertung", "title='Anzahl')] ) st.subheader('Entitäten und Katalogisierungslevel') else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)',", "os import glob from wordcloud import WordCloud import streamlit_analytics path", "DNB-Titeldaten') st.write('Weniger als ein Drittel der GND-Entitäten in DNB-Titeldaten wurde", "Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N'", "#wordcloud der top 100 sachbegriffe eines auszuwählenden tages der letzten", "in verschiedenen Katalogisierungsleveln (1-7) angelegt werden. 
Je niedriger das Katalogisierungslevel,", "use_container_width=True) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0", "auf einer Karte musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren der", "musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0", "- Sachbegriffe\", \"Tu - Werke\", \"Tf - Veranstaltungen\") ) st.sidebar.info('Diese", "der intellktuell verknüpften GND-Entitäten in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as", "#GND-Entitäten aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f: fremd_entities =", "erstellt, die Karten mit [Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud", "personen in der gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name':", "return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def relationen(): #Top 10 der GND-Relationierungscodes", "alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True) #Karte", "i <= 5: with col1: st.write(f'{i}. {row[\"name\"]}') elif i >", "nicht in die Auswertung einbezogen. 
Eine detaillierte Auflistung der ausgewerteten", "{satzart} in DNB-Titeldaten') top_daten = load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode(", "'alle': st.subheader(f'TOP 10 GND-Entitäten in DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None)", "glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13] for x in files] daten.sort() daten_filter", "= pdk.Layer( \"ScatterplotLayer\", df, pickable=True, opacity=0.5, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100,", "as f: fremd_entities = int(f.read()) #Anzahl der intellktuell verknüpften GND-Entitäten", "st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf(): #zeitverlauf der erstellung der GND-Sätze ab", "st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren der", "= int(f.read()) #GND-Entitäten aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f:", "intellektuellen Erschließungsprozessen vergeben. Jeweils ca. 
ein weiteres Drittel wurde in", "alt.X('Name:N', sort='y'), alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] )", "matplotlib.pyplot import title import streamlit as st import pandas as", "meistverwendeten GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die Entitäten der", "= st.sidebar.selectbox( \"Über welche GND-Satzart möchten Sie etwas erfahren?\", ('alle',", "51] ) st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10", "und Daten stehen unter CC0 Lizenz und können frei weitergenutzt", "GND-Personen') st.markdown('Von allen Personensätzen (Tp) weisen 782.682 Angaben zum Wirkungsort", "Liste der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige", "der GND-Entitäten, die in den letzten 365 Tagen angelegt wurden.')", "werden mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool produziert aus", "gefiltert. Dieses Tool produziert aus dem sehr großen Gesamtabzug (~", "CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. Die Diagramme wurden mit", "Nationalbibliothek (DNB) inkl. 
Zeitschriftendatenbank (ZDB), sofern sich Exemplare der Zeitschrift", "es [hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige TOP …', min_value=5, max_value=len(classification_ts), value=10,", "nach Satzart und Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level']", "alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip = [alt.Tooltip('id', title='Notation'), alt.Tooltip('name',", "int(f.read()) #GND-Entitäten aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f: fremd_entities", "zu finden. Die Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt, die Karten", "str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten in den", "Buch- und Schriftmuseums (DBSM) nachgewiesen. Der Gesamtabzug liegt im OCLC-Format", "= alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name',", "Gesamtabzug liegt im OCLC-Format PICA+ vor. Die Daten werden mithilfe", "sehr großen Gesamtabzug (~ 31 GB) kleinere CSV-Dateien, die mit", ") scatterplotlayer = pdk.Layer( \"ScatterplotLayer\", df, pickable=True, opacity=0.5, stroked=True, filled=True,", "in Satzart {satzart}') st.write('Alle GND-Entitäten können in verschiedenen Katalogisierungsleveln (1-7)", "wurden if satzart == 'alle': st.subheader(f'TOP 10 GND-Newcomer') st.write('TOP 10", "intellektuell vergebene Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten in den DNB-Titeldaten. Durchschnittlich", "title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart}", "with col1: st.write(f'{i}. 
{row[\"name\"]}') elif i > 5: with col2:", "großen Gesamtabzug (~ 31 GB) kleinere CSV-Dateien, die mit Python", "in DNB-Titeldaten wurde in intellektuellen Erschließungsprozessen vergeben. Jeweils ca. ein", "''') #sidebar mit satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart = st.sidebar.selectbox( \"Über", "alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen, die maschinell erzeugt wurden, aus Fremddaten", "= st.select_slider('Wählen Sie ein Datum', options=daten, value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv')", "alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} in", "Auswertung einbezogen. Eine detaillierte Auflistung der ausgewerteten Felder ist im", "widgets für einzelne satzarten if satzart == \"Tp - Personen\":", "automatischen Prozessen\": auto_entites, \"Entitäten aus Fremddaten\": fremd_entities}, orient = \"index\").reset_index()", "= pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren der Musik 1400–2010') st.write('Eine Auswertung", "Python Community der Deutschen Nationalbibliothek.') gnd_allgemein = st.beta_container() with gnd_allgemein:", "if i <= 5: with col1: st.write(f'{i}. 
{row[\"name\"]}') elif i", "Veranstaltungen\") ) st.sidebar.info('Diese Widgets haben die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/)", "Wählen Sie links die Satzart, die Sie interessiert, und Sie", ") st.sidebar.info('Diese Widgets haben die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), [a-wendler](https://github.com/a-wendler/) sowie", "der top 100 sachbegriffe eines auszuwählenden tages der letzten 10", "satzart == \"Tp - Personen\": wirkungsorte() elif satzart == \"Tg", "satzart == 'alle': #Anzahl GND-Verknüpfungen in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\")", "sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. Die Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html)", "alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N',", "Sachbegriffe pro Tag') st.write('Wählen Sie ein Datum aus den letzten", "= pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können mit anderen Datensätzen verlinkt", "alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] )", "classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'),", "CC0 Lizenz und können frei weitergenutzt werden. Die Daten werden", "{Anzahl} Personen\"})) def wirkungsorte_musik(): #nach jahrzehnten zwischen 1400 und 2010", "können frei weitergenutzt werden. Die Daten werden monatlich aktualisiert. 
''')", ") musiker_scatter = pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon, lat]', pickable=True,", "[hier](http://www.dnb.de/gndsyst).') class_filt = st.slider('Zeige Top …', 5, len(classification), 10, 1)", "Wirkungsort der jeweiligen Person auf.') #Balkendiagramm orte_filt = st.slider('Zeige Top", "des Begriffes entspricht der Häufigkeit des Sachbegriffs.') files = glob.glob(f'{path}/../stats/*Ts-count.csv')", "alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} in DNB-Titeldaten') top_daten", "1) classification_count = alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'), alt.Y('count',", "newcomer = alt.Chart(newcomer_daten).mark_bar().encode( alt.X('gnd_id', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y',", "1 for index, row in musik_filt.nlargest(10, 'norm').iterrows(): if i <=", "alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count',", "df, pickable=True, opacity=0.5, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]',", "title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.altair_chart(newcomer, use_container_width=True) def gnd_top():", "Verwenden Sie einen auf Chromium basierenden Browser.') with st.beta_expander(\"Methodik und", "systematik_ts(): #Ranking der Systematik von Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False)", "alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'),", 
"st.subheader(f'TOP 10 Wirkungszentren der {limiter}er') col1, col2 = st.beta_columns(2) i", "col2 = st.beta_columns(2) i = 1 for index, row in", "as st import pandas as pd import altair as alt", "title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip('entity', title='Satzart'), alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')]", "alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'),", "index_col='idn') st.header('Wirkungszentren der Musik 1400–2010') st.write('Eine Auswertung der veröffentlichten Titel", "10 Werktagen vor der letzten Aktualisierung der Daten des Dashboards", "= df.entity.str[2:3] df.entity = df.entity.str[:2] if satzart == 'alle': entity_count", "Systematik-Notationen der Sachbegriffe (Ts) aufgetragen. Die Liste der möglichen Notationen", "= f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f:", "= 1 for index, row in musik_filt.nlargest(10, 'norm').iterrows(): if i", "as pd import altair as alt import pydeck as pdk", "alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro Monat'), tooltip=['count'] ) return st.altair_chart(created,", "der möglichen Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige TOP", "liegt im OCLC-Format PICA+ vor. Die Daten werden mithilfe des", "mit dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben. Die Skripte sowie die gefilterten", "Deutschen Musikarchivs (DMA) sowie der Buch- und Objektbestand des Deutschen", "der GND können in eine Systematik eingeordnet werden. 
Die Liste", "der GND-Relationierungscodes rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können mit", "relations = f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten gesamt: {relations.replace(',','.')}\") def systematik():", "load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame() for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df =", "title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'),", "wirkungsorte_musik(): #nach jahrzehnten zwischen 1400 und 2010 gefilterte auswertung der", "Titeldaten der Deutschen Nationalbibliothek (Stand der Daten: Juli 2021). Wählen", "(~ 31 GB) kleinere CSV-Dateien, die mit Python weiterverarbeitet werden.", "if satzart == \"Tp - Personen\": wirkungsorte() elif satzart ==", "Sie statistische Auswertungen der GND und ihrer Verknüpfungen mit den", "letzten 10 Werktagen vor der letzten Aktualisierung der Daten des", "GND-Entitäten in DNB-Titeldaten, nach Satzart gefiltert if satzart == 'alle':", "rels = pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können mit anderen Datensätzen", "mit [Altair](https://altair-viz.github.io/index.html) erstellt, die Karten mit [Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)),", "'alle': gesamt_entity_count() entities() newcomer() zeitverlauf() relationen() systematik() else: entities() newcomer()", "sie im Bestand der DNB repräsentiert sind.') limiter = st.slider('Jahresfilter',", "pd.read_csv(f'{path}/../stats/gnd_codes_all.csv', index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können mit anderen Datensätzen verlinkt (»relationiert«)", "\"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren der {limiter}er') col1, col2 = 
st.beta_columns(2)", "#Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 )", "int(f.read()) #Anzahl der intellktuell verknüpften GND-Entitäten in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\",", "Alle Skripte und Daten stehen unter CC0 Lizenz und können", "names=['created_at', 'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf einer Zeitleiste wird die", "f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def relationen(): #Top 10 der", "def newcomer(): #TOP 10 der Entitäten, die in den letzten", "colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def wirkungsorte(): #ranking und karte der", "Sie interessiert, und Sie erhalten die verfügbaren Auswertungen und Statstiken.", "pdk import os import glob from wordcloud import WordCloud import", "sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'),", "alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf():", "limiter)] musik_filt['norm']=(musik_filt['count']-musik_filt['count'].min())/(musik_filt['count'].max()-musik_filt['count'].min()) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16,", "elif satzart == \"Ts - Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb =", "ab Januar 1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0, names=['created_at',", "Jeweils ca. 
ein weiteres Drittel wurde in maschinellen Erschließungsprozessen vergeben,", "ein Drittel stammt aus Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'),", "= pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count']) df['level'] = df.entity.str[2:3] df.entity = df.entity.str[:2]", "Systematik von Ts-Sätzen classification_ts = pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der Sachbegriffe')", "Hier sind die am häufigsten verwendeten Relationierungscodes zu sehen. Die", "import glob from wordcloud import WordCloud import streamlit_analytics path =", "pydeck as pdk import os import glob from wordcloud import", "longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) musiker_scatter = pdk.Layer( \"ScatterplotLayer\", musik_filt,", "sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip = [alt.Tooltip('id', title='Notation'),", "den letzten 365 Tagen angelegt wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None)", "in DNB-Titeldaten') st.write('Weniger als ein Drittel der GND-Entitäten in DNB-Titeldaten", "wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten',", "st.markdown('Von allen Personensätzen (Tp) weisen 782.682 Angaben zum Wirkungsort der", "\"r\") as f: auto_entites = int(f.read()) #GND-Entitäten aus Fremddaten with", "gesamt: {entities.replace(',','.')}\") def relationen(): #Top 10 der GND-Relationierungscodes rels =", "mit den Titeldaten der Deutschen Nationalbibliothek (Stand der Daten: Juli", "haben die GitHub-User [niko2342](https://github.com/niko2342/), [ramonvoges](https://github.com/ramonvoges), 
[a-wendler](https://github.com/a-wendler/) sowie <NAME> geschrieben. Sie", "\"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen", "(DNB) inkl. Zeitschriftendatenbank (ZDB), sofern sich Exemplare der Zeitschrift im", "title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')]", "na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N',", "title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel in Satzart {satzart}') st.write('Alle GND-Entitäten", "alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N',", "alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart} GND-Newcomer') st.write(f'TOP 10", "der Musik 1400–2010') st.write('Eine Auswertung der veröffentlichten Titel von Musikern", "top_daten = load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'),", "GND-Sätze aufgetragen. 
Die ersten Sätze stammen aus dem Januar 1972')", "auf Chromium basierenden Browser.') with st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown(''' Datengrundlage", "Tagen angelegt wurden.') newcomer_daten = pd.read_csv(f'{path}/../stats/title_gnd_newcomer_top10.csv', index_col=None) newcomer = alt.Chart(newcomer_daten).mark_bar().encode(", "und ihrer Verknüpfungen mit den Titeldaten der Deutschen Nationalbibliothek (Stand", "tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count, use_container_width=True) with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\")", "= df.to_dict(orient='records') worte = {} for record in dict: worte.update({record['sachbegriff']:record['count']})", "step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N', title='Notation', sort='-y'), alt.Y('count:Q',", "st.subheader(f'TOP 10 {satzart} GND-Newcomer') st.write(f'TOP 10 der {satzart} Sätze, die", "wurden nicht in die Auswertung einbezogen. Eine detaillierte Auflistung der", "der monatlich erstellten GND-Sätze aufgetragen. Die ersten Sätze stammen aus", "open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f: auto_entites = int(f.read()) #GND-Entitäten aus Fremddaten", "title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392,", "width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def wirkungsorte(): #ranking und", "den letzten 10 Werktagen vor der letzten Aktualisierung der Daten", "Sie eine Wordcloud der 100 meistverwendeten GND-Sachbegriffe dieses Tages. 
Die", "color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as", "import pandas as pd import altair as alt import pydeck", "vergebene Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten in den DNB-Titeldaten. Durchschnittlich {mean}", "von {Anzahl} Personen\"})) def wirkungsorte_musik(): #nach jahrzehnten zwischen 1400 und", "von qualifizierten Personen erstellt bzw. überprüft wurden.') return st.altair_chart(entity_count, use_container_width=True)", "title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_count, use_container_width=True) def", "vergeben. Jeweils ca. ein weiteres Drittel wurde in maschinellen Erschließungsprozessen", "#GND-Entitäten nach Satzart und Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv', index_col=False, names=['entity','count'])", "DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f: uniques = int(f.read()) uniques_str", "Datum aus den letzten 10 Werktagen vor der letzten Aktualisierung", "sehen. 
Die Auflösung der wichtigsten Codes gibt es [hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt", "st.write(f\"Relationen zwischen Entitäten gesamt: {relations.replace(',','.')}\") def systematik(): #Ranking der meistverwendeten", "stammen oder verwaist sind, wurden nicht in die Auswertung einbezogen.", "st.write('Verknüpfungen, die maschinell erzeugt wurden, aus Fremddaten stammen oder verwaist", "f'{int(f.read()):,}' #GND-Entitäten maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f: auto_entites", "[hier](https://wiki.dnb.de/download/attachments/51283696/Codeliste_ABCnachCode_Webseite_2012-07.pdf).') rels_filt = st.slider('Zeige Top ...', 5, len(rels), 10, 1)", "df.entity.str[:2] if satzart == 'alle': entity_count = alt.Chart(df).mark_bar().encode( alt.X('sum(count)', title='Datensätze", "elif satzart == \"Tg - Geografika\": wirkungsorte_musik() wirkungsorte() elif satzart", "des Deutschen Musikarchivs (DMA) sowie der Buch- und Objektbestand des", "Karten mit [Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html).", "GND-Entitäten in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f: uniques =", "aus automatischen Prozessen\": auto_entites, \"Entitäten aus Fremddaten\": fremd_entities}, orient =", "Musikern und deren Wirkungszeiten erlaubt Rückschlüsse auf die musikalischen Zentren,", "der meistverwendeten GND-Systematik-Notationen classification = pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die Entitäten", "== 'alle': gesamt_entity_count() entities() newcomer() zeitverlauf() relationen() systematik() else: entities()", "einer Zeitleiste wird die Anzahl der monatlich erstellten GND-Sätze aufgetragen.", "Titeldaten ist auch der Tonträger- und 
Notenbestand des Deutschen Musikarchivs", "alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True) else:", "elif i > 5: with col2: st.write(f'{i}. {row[\"name\"]}') i +=", "die Satzart, die Sie interessiert, und Sie erhalten die verfügbaren", "Lizenz und können frei weitergenutzt werden. Die Daten werden monatlich", "der Verlinkung wird über einen Relationierungscode beschrieben. Hier sind die", "alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] )", "(GND) sowie der Titeldaten der Deutschen Nationalbibliothek (DNB) inkl. Zeitschriftendatenbank", "GND-Entitäten, die in den letzten 365 Tagen angelegt wurden.') newcomer_daten", "files = glob.glob(f'{path}/../stats/*Ts-count.csv') daten = [x[-23:-13] for x in files]", "WordCloud(background_color=\"white\", max_words=100, width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def wirkungsorte():", "und karte der meistverwendeten wirkungsorte aller personen in der gnd", "title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen, die", "von GND-Personen') st.markdown('Von allen Personensätzen (Tp) weisen 782.682 Angaben zum", "aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as f: fremd_entities = int(f.read())", "Satzart, die Sie interessiert, und Sie erhalten die verfügbaren Auswertungen", "tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] ) else:", "in musik_filt.nlargest(10, 'norm').iterrows(): if i <= 5: with col1: st.write(f'{i}.", "100 sachbegriffe eines 
auszuwählenden tages der letzten 10 werktage st.header('TOP", "= pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q',", "{mean} Verknüpfungen zu {satzart}-Sätzen pro DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich oben", "zoom=4.5, max_zoom=16, bearing=0 ) musiker_scatter = pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8,", "sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'),", "an GND-Verknüpfungen pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f: mean", "'count', keep='all')).mark_bar().encode( alt.X('code', title='Relationierungs-Code', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'),", "= alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q', title='Anzahl'), color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities,", "title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_count, use_container_width=True) def systematik_ts(): #Ranking", "st.subheader(f'TOP 10 {satzart} in DNB-Titeldaten') top_daten = load_gnd_top_daten('top10') gnd_top =", "DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich oben with st.beta_container(): st.info('Hier finden Sie", "len(classification), 10, 1) classification_count = alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation',", "der gnd df = pd.read_csv(f'{path}/wirkungsorte-top50.csv') df.drop(columns=['id'], inplace=True) df.rename(columns={'name': 'Name', 'count':", "GND-Newcomer') st.write('TOP 10 der GND-Entitäten, die in den letzten 365", "einer Karte musiker_orte = 
pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t', index_col='idn') st.header('Wirkungszentren der Musik", "und können frei weitergenutzt werden. Die Daten werden monatlich aktualisiert.", "365 Tagen angelegt wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer = alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2],", "with open(f\"{path}/../stats/gnd_relation_count.csv\", \"r\") as f: relations = f'{int(f.read()):,}' st.write(f\"Relationen zwischen", "st.select_slider('Wählen Sie ein Datum', options=daten, value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict", "class_filt = st.slider('Zeige Top …', 5, len(classification), 10, 1) classification_count", "angelegt werden. Je niedriger das Katalogisierungslevel, desto verlässlicher die Daten,", "Angaben zum Wirkungsort der jeweiligen Person auf.') #Balkendiagramm orte_filt =", "sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen,", "über einen Relationierungscode beschrieben. Hier sind die am häufigsten verwendeten", "get_line_color=[39, 71, 51] ) st.pydeck_chart(pdk.Deck( musiker_scatter, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"}))", "wie sie im Bestand der DNB repräsentiert sind.') limiter =", "initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von {Anzahl} Personen\"})) def wirkungsorte_musik():", "Wirkungsorte von GND-Personen') st.markdown('Von allen Personensätzen (Tp) weisen 782.682 Angaben", "niedriger das Katalogisierungslevel, desto verlässlicher die Daten, weil Sie dann", "meistverwendeten GND-Sachbegriffe dieses Tages. Die Größe des Begriffes entspricht der", "10 der GND-Entitäten, die in den letzten 365 Tagen angelegt", "weiterverarbeitet werden. 
Das Dashboard ist mit dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben.", "auszuwählenden tages der letzten 10 werktage st.header('TOP 100 Sachbegriffe pro", "== \"Ts - Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb = st.beta_container() with", "df.entity.str[2:3] df.entity = df.entity.str[:2] if satzart == 'alle': entity_count =", ") st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5,", "st.header('Wirkungszentren der Musik 1400–2010') st.write('Eine Auswertung der veröffentlichten Titel von", "\"Entitäten aus Fremddaten\": fremd_entities}, orient = \"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\",", "as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen zu {satzart}-Sätzen", "2010 gefilterte auswertung der GND-Musikwerke, Musik-Personen und Wikrungsorte und daraus", "#zeitverlauf der erstellung der GND-Sätze ab Januar 1972 created_at =", "tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) else:", "str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen zu {satzart}-Sätzen pro DNB-Titeldatensatz') #main st.title('GND-Dashboard')", "title='Notation', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip = [alt.Tooltip('id',", "value=daten[-1]) df = pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records') worte = {}", "= pd.read_csv(f'{path}/../stats/{daten_filter}-Ts-count.csv') dict = df.to_dict(orient='records') worte = {} for record", "(DBSM) nachgewiesen. Der Gesamtabzug liegt im OCLC-Format PICA+ vor. Die", "ca. 
ein Drittel stammt aus Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N',", "(Tp) weisen 782.682 Angaben zum Wirkungsort der jeweiligen Person auf.')", "OCLC-Format PICA+ vor. Die Daten werden mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs)", "Art der Verlinkung wird über einen Relationierungscode beschrieben. Hier sind", "sowie <NAME> geschrieben. Sie gehören zur Python Community der Deutschen", "abhängigkeit der satzart if satzart == 'alle': gesamt_entity_count() entities() newcomer()", "fremd_entities}, orient = \"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der", "[wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine", "#TOP 10 GND-Entitäten in DNB-Titeldaten, nach Satzart gefiltert if satzart", "title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10 {satzart}", "Wordcloud mit [wordcloud](https://amueller.github.io/word_cloud/index.html). Für grundlegende Zugriffsstatistik verwenden wir [streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei", "def wirkungsorte(): #ranking und karte der meistverwendeten wirkungsorte aller personen", "st.write('TOP 10 der GND-Entitäten, die in den letzten 365 Tagen", "title import streamlit as st import pandas as pd import", "= WordCloud(background_color=\"white\", max_words=100, width=2000, height=800, colormap='tab20') wc.generate_from_frequencies(worte) return st.image(wc.to_array()) def", "sowie die gefilterten CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. 
Die", "die in den letzten 365 Tagen angelegt wurden.') newcomer_daten =", "sachbegriff_cloud(): #wordcloud der top 100 sachbegriffe eines auszuwählenden tages der", "im OCLC-Format PICA+ vor. Die Daten werden mithilfe des Pica-Parsers", "mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses Tool produziert aus dem", "mit Python weiterverarbeitet werden. Das Dashboard ist mit dem Python-Framework", "graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)),", ") st.subheader(f'Katalogisierungslevel in Satzart {satzart}') st.write('Alle GND-Entitäten können in verschiedenen", "st.markdown(''' Datengrundlage ist ein Gesamtabzug der Daten der Gemeinsamen Normadatei", "verknüpften GND-Entitäten in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links_unique.csv\", \"r\") as f: uniques", "index_col=False) st.subheader('Relationen') st.write('GND-Datensätze können mit anderen Datensätzen verlinkt (»relationiert«) werden.", "top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'),", "use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',')", "@st.cache def load_gnd_top_daten(typ): gnd_top_df = pd.DataFrame() for file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'):", "[Altair](https://altair-viz.github.io/index.html) erstellt, die Karten mit [Deck GL](https://deck.gl/) (via [Pydeck](https://deckgl.readthedocs.io/en/latest/#)), die", "verfügbaren Auswertungen und Statstiken. 
Verwenden Sie einen auf Chromium basierenden", "use_container_width=True) def entities(): #GND-Entitäten nach Satzart und Katalogisierungslevel df =", "alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] )", "sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'),", "der Musikkultur, dargestellt auf einer Karte musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv', sep='\\t',", "nachgewiesen. Der Gesamtabzug liegt im OCLC-Format PICA+ vor. Die Daten", "\"Tu - Werke\", \"Tf - Veranstaltungen\") ) st.sidebar.info('Diese Widgets haben", "relationen() systematik() else: entities() newcomer() #besondere widgets für einzelne satzarten", "GND-Verknüpfungen in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f: links =", "= pd.read_csv(f'{path}/../stats/gnd_classification_all.csv', index_col=False) st.subheader('Systematik') st.write('Die Entitäten der GND können in", "= str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen zu {satzart}-Sätzen pro DNB-Titeldatensatz') #main", "Musik-Personen und Wikrungsorte und daraus abgeleitete Zentren der Musikkultur, dargestellt", "bzw. 
überprüft wurden.') return st.altair_chart(entity_count, use_container_width=True) def newcomer(): #TOP 10", "alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y',", "der erstellung der GND-Sätze ab Januar 1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv',", "alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q',", "pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) scatterplotlayer = pdk.Layer(", "Auswertungen und Statstiken. Verwenden Sie einen auf Chromium basierenden Browser.')", "title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] )", "= pd.read_csv(f'{path}/../stats/gnd_classification_Ts_all.csv', index_col=False) st.subheader('Systematik der Sachbegriffe') st.write('Die Entitäten der GND", "Satzart {satzart}') st.write('Alle GND-Entitäten können in verschiedenen Katalogisierungsleveln (1-7) angelegt", "= [x[-23:-13] for x in files] daten.sort() daten_filter = st.select_slider('Wählen", "befinden. 
In den Titeldaten ist auch der Tonträger- und Notenbestand", "maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f: auto_entites = int(f.read())", "st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten in den DNB-Titeldaten.", "file in glob.glob(f'{path}/../stats/title_gnd_{typ}_*.csv'): gnd_top_df = gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df def", "Notationen gibt es [hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige TOP …', min_value=5,", "value=10, step=1) graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N', sort='y'), alt.Y('Anzahl'),", "st.altair_chart(classification_count, use_container_width=True) def systematik_ts(): #Ranking der Systematik von Ts-Sätzen classification_ts", "verlässlicher die Daten, weil Sie dann von qualifizierten Personen erstellt", "def sachbegriff_cloud(): #wordcloud der top 100 sachbegriffe eines auszuwählenden tages", "st.write('Alle GND-Entitäten können in verschiedenen Katalogisierungsleveln (1-7) angelegt werden. Je", "index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'), alt.Color('name:N',", "Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb = st.beta_container() with dnb: st.header('GND in", "map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren der {limiter}er') col1, col2", "Relationierungscodes zu sehen. 
Die Auflösung der wichtigsten Codes gibt es", "if satzart == 'alle': #Anzahl GND-Verknüpfungen in DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\",", "parse_dates=True, header=0, names=['created_at', 'count']) st.subheader('Zeitverlauf der GND-Datensatzerstellung') st.write('Auf einer Zeitleiste", "1400 und 2010 gefilterte auswertung der GND-Musikwerke, Musik-Personen und Wikrungsorte", "= alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y',", "max_zoom=16, bearing=0 ) scatterplotlayer = pdk.Layer( \"ScatterplotLayer\", df, pickable=True, opacity=0.5,", "Personen\": wirkungsorte() elif satzart == \"Tg - Geografika\": wirkungsorte_musik() wirkungsorte()", "use_container_width=True) def systematik_ts(): #Ranking der Systematik von Ts-Sätzen classification_ts =", "- Sachbegriffe\": sachbegriff_cloud() systematik_ts() dnb = st.beta_container() with dnb: st.header('GND", ") st.write('Verknüpfungen, die maschinell erzeugt wurden, aus Fremddaten stammen oder", "else: entities() newcomer() #besondere widgets für einzelne satzarten if satzart", "der Daten der Gemeinsamen Normadatei (GND) sowie der Titeldaten der", "lat]', pickable=True, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, radiusscale=100, line_width_min_pixels=1, get_radius=\"norm*50000\", get_fill_color=[50,", "'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"),", "alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')] )", "Tages. 
Die Größe des Begriffes entspricht der Häufigkeit des Sachbegriffs.')", "else: entity_count = alt.Chart(df.loc[df['entity'].str.startswith(satzart[:2])]).mark_bar().encode( alt.X('sum(count)', title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'),", "verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f: auto_entites = int(f.read()) #GND-Entitäten", "def entities(): #GND-Entitäten nach Satzart und Katalogisierungslevel df = pd.read_csv(f'{path}/../stats/gnd_entity_types.csv',", "DNB-Titeldaten with open(f\"{path}/../stats/title_gnd_links.csv\", \"r\") as f: links = f'{int(f.read()):,}' #GND-Entitäten", "die am häufigsten verwendeten Relationierungscodes zu sehen. Die Auflösung der", "von Musikern und deren Wirkungszeiten erlaubt Rückschlüsse auf die musikalischen", "title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro Monat'), tooltip=['count'] ) return st.altair_chart(created, use_container_width=True)", "der Deutschen Nationalbibliothek (Stand der Daten: Juli 2021). 
Wählen Sie", "in DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N',", "möchten Sie etwas erfahren?\", ('alle', \"Tp - Personen\", \"Tb -", "min_value=3, max_value=len(df), value=10, step=1) graph_count = alt.Chart(df.nlargest(orte_filt, 'Anzahl', keep='all')).mark_bar().encode( alt.X('Name:N',", "index, row in musik_filt.nlargest(10, 'norm').iterrows(): if i <= 5: with", "alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE = pdk.ViewState( latitude=50.67877877706058,", "Browser.') with st.beta_expander(\"Methodik und Datenherkunft\"): st.markdown(''' Datengrundlage ist ein Gesamtabzug", "pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode( alt.X('gnd_id:N', title='Entitäten', sort='-y'), alt.Y('count:Q', title='Anzahl'),", "min_value=5, max_value=len(classification_ts), value=10, step=1) classification_ts_count = alt.Chart(classification_ts.nlargest(class_ts_filt, 'count', keep='all')).mark_bar().encode( alt.X('id:N',", "Titeldaten if satzart == 'alle': #Anzahl GND-Verknüpfungen in DNB-Titeldaten with", "ein Gesamtabzug der Daten der Gemeinsamen Normadatei (GND) sowie der", "mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten", "def dnb_links(): #GND-Verknüpfungen in DNB Titeldaten if satzart == 'alle':", "[streamlit-analytics](https://pypi.org/project/streamlit-analytics/). Dabei werden keine personenbezogenen Daten gespeichert. Alle Skripte und", "eine Systematik eingeordnet werden. 
Hier sind die Systematik-Notationen der Sachbegriffe", "title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')]", "und Datenherkunft\"): st.markdown(''' Datengrundlage ist ein Gesamtabzug der Daten der", "letzten 365 Tagen angelegt wurden.') newcomer_daten = load_gnd_top_daten('newcomer_top10') newcomer =", "stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140,", "{satzart} GND-Newcomer') st.write(f'TOP 10 der {satzart} Sätze, die in den", "aus Fremddaten\": fremd_entities}, orient = \"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"})", "f: relations = f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten gesamt: {relations.replace(',','.')}\") def", "= str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen zu {uniques_str.replace(',','.')} GND-Entitäten in", "with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean}", "sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')]", "10 {satzart} in DNB-Titeldaten') top_daten = load_gnd_top_daten('top10') gnd_top = alt.Chart(top_daten.loc[top_daten['bbg'].str.startswith(satzart[:2],", "- Personen\", \"Tb - Körperschaften\", \"Tg - Geografika\", \"Ts -", "\"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f'Durchschnittlich {mean} Verknüpfungen zu", "gefilterten CSV-Rohdaten sind auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. 
Die Diagramme wurden", "und Sie erhalten die verfügbaren Auswertungen und Statstiken. Verwenden Sie", "wird über einen Relationierungscode beschrieben. Hier sind die am häufigsten", "= st.slider('Jahresfilter', min_value=1400, max_value=int(musiker_orte['jahrzehnt'].max()), value=(1900), step=10) musik_filt= musiker_orte.loc[(musiker_orte['jahrzehnt'] == limiter)]", "Entitäten, die in den letzten 365 Tagen erstellt wurden if", "tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren der {limiter}er') col1, col2 =", "initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{name}</b>\"})) st.subheader(f'TOP 10 Wirkungszentren der {limiter}er') col1,", "return st.altair_chart(entity_count, use_container_width=True) def newcomer(): #TOP 10 der Entitäten, die", "st.pydeck_chart(pdk.Deck( scatterplotlayer, initial_view_state=INITIAL_VIEW_STATE, map_style=pdk.map_styles.LIGHT, tooltip={\"html\": \"<b>{Name}</b><br \\>Wirkungsort von {Anzahl} Personen\"}))", "Sie gehören zur Python Community der Deutschen Nationalbibliothek.') gnd_allgemein =", "Dashboards dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def dnb_links(): #GND-Verknüpfungen in DNB Titeldaten", "worte = {} for record in dict: worte.update({record['sachbegriff']:record['count']}) wc =", "f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')} intellektuell vergebene Verknüpfungen zu {uniques_str.replace(',','.')}", "GND-Entitäten in DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top = alt.Chart(top_daten).mark_bar().encode(", "'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte von GND-Personen') st.markdown('Von allen Personensätzen (Tp)", "sort='y'), alt.Y('Anzahl'), alt.Color('Name:N', legend=alt.Legend(columns=2)), tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count,", "= 
gnd_top_df.append(pd.read_csv(file, index_col=None)) return gnd_top_df def sachbegriff_cloud(): #wordcloud der top", "st.subheader('Relationen') st.write('GND-Datensätze können mit anderen Datensätzen verlinkt (»relationiert«) werden. Die", ") return st.altair_chart(created, use_container_width=True) def entities(): #GND-Entitäten nach Satzart und", "Der Gesamtabzug liegt im OCLC-Format PICA+ vor. Die Daten werden", "pickable=True, opacity=0.5, stroked=True, filled=True, radius_min_pixels=1, radius_max_pixels=100, line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\",", "alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10", "title='Notation', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name',", "ca. ein weiteres Drittel wurde in maschinellen Erschließungsprozessen vergeben, ca.", "dokumentiert.') st.altair_chart(gnd_top, use_container_width=True) def dnb_links(): #GND-Verknüpfungen in DNB Titeldaten if", "fremd_entities = int(f.read()) #Anzahl der intellktuell verknüpften GND-Entitäten in DNB-Titeldaten", "Personen erstellt bzw. 
überprüft wurden.') return st.altair_chart(entity_count, use_container_width=True) def newcomer():", "tooltip=[alt.Tooltip('Name:N', title='Ort'), alt.Tooltip('Anzahl:Q', title='Anzahl')] ) st.altair_chart(graph_count, use_container_width=True) #Karte INITIAL_VIEW_STATE =", "Geografika\", \"Ts - Sachbegriffe\", \"Tu - Werke\", \"Tf - Veranstaltungen\")", "newcomer() #besondere widgets für einzelne satzarten if satzart == \"Tp", "st.subheader(f'Katalogisierungslevel in Satzart {satzart}') st.write('Alle GND-Entitäten können in verschiedenen Katalogisierungsleveln", "title='Anzahl'), alt.Color('code', sort='-y', title='Relationierungscode'), tooltip=[alt.Tooltip('count', title='Anzahl'), alt.Tooltip('code', title='Code')] ) st.altair_chart(relation_count,", "= int(f.read()) uniques_str = f'{uniques:,}' #Durchschnittliche Anzahl an GND-Verknüpfungen pro", "= [alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return st.altair_chart(classification_ts_count,", "In den Titeldaten ist auch der Tonträger- und Notenbestand des", "bearing=0 ) musiker_scatter = pdk.Layer( \"ScatterplotLayer\", musik_filt, opacity=0.8, get_position='[lon, lat]',", "entities() newcomer() zeitverlauf() relationen() systematik() else: entities() newcomer() #besondere widgets", "st.slider('Zeitraum', 1972, 2021, (1972,2021), 1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'),", "daraus abgeleitete Zentren der Musikkultur, dargestellt auf einer Karte musiker_orte", ") else: st.subheader(f'TOP 10 {satzart} in DNB-Titeldaten') top_daten = load_gnd_top_daten('top10')", "f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten gesamt: {relations.replace(',','.')}\") def systematik(): #Ranking der", "1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at', parse_dates=True, header=0, 
names=['created_at', 'count']) st.subheader('Zeitverlauf", "title='Datensätze pro Katalogisierungslevel'), alt.Y('entity', title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'),", "Person auf.') #Balkendiagramm orte_filt = st.slider('Zeige Top …', min_value=3, max_value=len(df),", "= st.slider('Zeige Top …', min_value=3, max_value=len(df), value=10, step=1) graph_count =", "Zentren, wie sie im Bestand der DNB repräsentiert sind.') limiter", "#main st.title('GND-Dashboard') #infoebereich oben with st.beta_container(): st.info('Hier finden Sie statistische", "df['level'] = df.entity.str[2:3] df.entity = df.entity.str[:2] if satzart == 'alle':", "desto verlässlicher die Daten, weil Sie dann von qualifizierten Personen", "wird die Anzahl der monatlich erstellten GND-Sätze aufgetragen. Die ersten", "= f'{uniques:,}' #Durchschnittliche Anzahl an GND-Verknüpfungen pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\",", "Daten gespeichert. Alle Skripte und Daten stehen unter CC0 Lizenz", "der satzart if satzart == 'alle': gesamt_entity_count() entities() newcomer() zeitverlauf()", "alt.Chart(newcomer_daten.loc[newcomer_daten['bbg'].str.startswith(satzart[:2], na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'),", "musik_filt.nlargest(10, 'norm').iterrows(): if i <= 5: with col1: st.write(f'{i}. 
{row[\"name\"]}')", "gefilterte auswertung der GND-Musikwerke, Musik-Personen und Wikrungsorte und daraus abgeleitete", "title='Satzart'), alt.Color('level', title='Katalogisierungslevel'), tooltip=[alt.Tooltip( 'level', title='Katalogisierungslevel'), alt.Tooltip('count', title='Anzahl')] ) st.subheader(f'Katalogisierungslevel", "title='Anzahl'), color='Datenart', tooltip='Anzahl:N' ) st.altair_chart(entities, use_container_width=True) else: with open(f\"{path}/../stats/title_gnd_mean_{satzart[:2]}.csv\", \"r\")", "with gnd_allgemein: st.header('GND Statistik allgemein') #allgemeine statistiken in abhängigkeit der", "= f'{int(f.read()):,}' st.write(f\"Relationen zwischen Entitäten gesamt: {relations.replace(',','.')}\") def systematik(): #Ranking", "dann von qualifizierten Personen erstellt bzw. überprüft wurden.') return st.altair_chart(entity_count,", "DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',') st.write(f\"{links.replace(',','.')}", "def wirkungsorte_musik(): #nach jahrzehnten zwischen 1400 und 2010 gefilterte auswertung", "#sidebar mit satzartenfilter st.sidebar.header(\"Satzart wählen\") satzart = st.sidebar.selectbox( \"Über welche", "werden. Das Dashboard ist mit dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben. Die", "ein weiteres Drittel wurde in maschinellen Erschließungsprozessen vergeben, ca. ein", "sort='-y', title=\"Bezeichnung\"), tooltip=[alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'), alt.Tooltip('count', title='Anzahl')] ) return", "sind die am häufigsten verwendeten Relationierungscodes zu sehen. Die Auflösung", "wurde in intellektuellen Erschließungsprozessen vergeben. Jeweils ca. 
ein weiteres Drittel", "Sachbegriffe') st.write('Die Entitäten der GND können in eine Systematik eingeordnet", "[hier](http://www.dnb.de/gndsyst).') class_ts_filt = st.slider('Zeige TOP …', min_value=5, max_value=len(classification_ts), value=10, step=1)", "DNB befinden. In den Titeldaten ist auch der Tonträger- und", "title='Anzahl'), alt.Color('name:N', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N', title='Entität'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('bbg:N', title='Satzart'),", "Statistik allgemein') #allgemeine statistiken in abhängigkeit der satzart if satzart", "Drittel der GND-Entitäten in DNB-Titeldaten wurde in intellektuellen Erschließungsprozessen vergeben.", "vor. Die Daten werden mithilfe des Pica-Parsers [pica.rs](https://github.com/deutsche-nationalbibliothek/pica-rs) gefiltert. Dieses", "und Statstiken. Verwenden Sie einen auf Chromium basierenden Browser.') with", "#GND-Entitäten maschinell verknüpft with open(f\"{path}/../stats/title_gnd_links_auto.csv\", \"r\") as f: auto_entites =", "Gemeinsamen Normadatei (GND) sowie der Titeldaten der Deutschen Nationalbibliothek (DNB)", "der Deutschen Nationalbibliothek (DNB) inkl. Zeitschriftendatenbank (ZDB), sofern sich Exemplare", "zeitverlauf(): #zeitverlauf der erstellung der GND-Sätze ab Januar 1972 created_at", "die verfügbaren Auswertungen und Statstiken. 
Verwenden Sie einen auf Chromium", "pro DNB-Titeldatensatz\") entity_df = pd.DataFrame.from_dict({\"intellektuell verknüpfte Entitäten\": uniques, \"Entitäten aus", "title='Anzahl'), alt.Color('name:N', sort='-y', title='Bezeichnung'), tooltip = [alt.Tooltip('id', title='Notation'), alt.Tooltip('name', title='Bezeichnung'),", "return st.altair_chart(classification_ts_count, use_container_width=True) def zeitverlauf(): #zeitverlauf der erstellung der GND-Sätze", "title='Anzahl')] ) st.write('Verknüpfungen, die maschinell erzeugt wurden, aus Fremddaten stammen", "inplace=True) st.header('TOP Wirkungsorte von GND-Personen') st.markdown('Von allen Personensätzen (Tp) weisen", "orient = \"index\").reset_index() entity_df = entity_df.rename(columns={\"index\":\"Datenart\", 0:\"Anzahl\"}) st.subheader('Datenherkunft der GND-Entitäten", "(1972,2021), 1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T', title='Erstelldatum'), alt.Y('count:Q', title='Sätze pro", "auf [Github](https://github.com/buchmuseum/GND_Dashboard) zu finden. Die Diagramme wurden mit [Altair](https://altair-viz.github.io/index.html) erstellt,", "Deutschen Nationalbibliothek.') gnd_allgemein = st.beta_container() with gnd_allgemein: st.header('GND Statistik allgemein')", "und deren Wirkungszeiten erlaubt Rückschlüsse auf die musikalischen Zentren, wie", "auto_entites = int(f.read()) #GND-Entitäten aus Fremddaten with open(f\"{path}/../stats/title_gnd_links_ext.csv\", \"r\") as", "5, len(classification), 10, 1) classification_count = alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id',", "unter CC0 Lizenz und können frei weitergenutzt werden. 
Die Daten", "Zentren der Musikkultur, dargestellt auf einer Karte musiker_orte = pd.read_csv(f'{path}/musiker_orte.csv',", "= st.slider('Zeitraum', 1972, 2021, (1972,2021), 1) created = alt.Chart(created_at[f'{created_filt[0]}':f'{created_filt[1]}'].reset_index()).mark_line().encode( alt.X('created_at:T',", "#TOP 10 der Entitäten, die in den letzten 365 Tagen", "10, 1) classification_count = alt.Chart(classification.nlargest(class_filt, 'count', keep='all')).mark_bar().encode( alt.X('id', title='Notation', sort='-y'),", "title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) st.write('Verknüpfungen, die maschinell erzeugt wurden, aus", "keine personenbezogenen Daten gespeichert. Alle Skripte und Daten stehen unter", "stehen unter CC0 Lizenz und können frei weitergenutzt werden. Die", "Drittel stammt aus Fremddaten.') entities = alt.Chart(entity_df).mark_bar().encode( alt.X('sum(Datenart):N', title='Datenart'), alt.Y('sum(Anzahl):Q',", "df.rename(columns={'name': 'Name', 'count': 'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte von GND-Personen') st.markdown('Von", "- Personen\": wirkungsorte() elif satzart == \"Tg - Geografika\": wirkungsorte_musik()", "Größe des Begriffes entspricht der Häufigkeit des Sachbegriffs.') files =", "2021). Wählen Sie links die Satzart, die Sie interessiert, und", "10 GND-Entitäten in DNB-Titeldaten') top_daten = pd.read_csv(f'{path}/../stats/title_gnd_top10.csv', index_col=None) gnd_top =", "Dashboard ist mit dem Python-Framework [Streamlit](https://streamlit.io/) geschrieben. 
Die Skripte sowie", "pro DNB-Titeldatensatz with open(f\"{path}/../stats/title_gnd_mean.csv\", \"r\") as f: mean = str(round(float(f.read()),2)).replace('.',',')", "line_width_min_pixels=1, get_position='[lon, lat]', get_radius=\"Anzahl\", get_fill_color=[255, 140, 0], get_line_color=[0, 0, 0]", "= pdk.ViewState( latitude=50.67877877706058, longitude=8.129981238464392, zoom=4.5, max_zoom=16, bearing=0 ) musiker_scatter =", "erstellung der GND-Sätze ab Januar 1972 created_at = pd.read_csv(f'{path}/../stats/gnd_created_at.csv', index_col='created_at',", "inplace=True) df.rename(columns={'name': 'Name', 'count': 'Anzahl'}, inplace=True) st.header('TOP Wirkungsorte von GND-Personen')", "der 100 meistverwendeten GND-Sachbegriffe dieses Tages. Die Größe des Begriffes", "return st.image(wc.to_array()) def wirkungsorte(): #ranking und karte der meistverwendeten wirkungsorte", "erzeugt wurden, aus Fremddaten stammen oder verwaist sind, wurden nicht", "alt.Tooltip('bbg:N', title='Satzart'), alt.Tooltip('gnd_id:N', title='IDN'), alt.Tooltip('count:Q', title='Anzahl')] ) else: st.subheader(f'TOP 10", "as f: entities = f'{int(f.read()):,}' return st.write(f\"GND-Entitäten gesamt: {entities.replace(',','.')}\") def", "GND-Musikwerke, Musik-Personen und Wikrungsorte und daraus abgeleitete Zentren der Musikkultur,", "beschrieben. Hier sind die am häufigsten verwendeten Relationierungscodes zu sehen.", "zu {satzart}-Sätzen pro DNB-Titeldatensatz') #main st.title('GND-Dashboard') #infoebereich oben with st.beta_container():", "Daten, weil Sie dann von qualifizierten Personen erstellt bzw. überprüft", "st.write('Auf einer Zeitleiste wird die Anzahl der monatlich erstellten GND-Sätze", "na=False)]).mark_bar().encode( alt.X('gnd_id:O', title='Entitäten', sort='-y'), alt.Y('count', title='Anzahl'), alt.Color('name', sort='-y', title='Entität'), tooltip=[alt.Tooltip('name:N'," ]
[ "processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message): self.message = message class", "class NoOverlapSpatialJoinError(Exception): def __init__(self, message): self.message = message # msg:", "overlapping polygons representing geographic features # please how shapefiles are", "geocoder_input.py \"\"\" class OverlappingGeographyError(Exception): def __init__(self, message): self.message = message", "all excpetions used in following scripts - geocoder.py - geocoder_input.py", "NoOverlapSpatialJoinError(Exception): def __init__(self, message): self.message = message # msg: geodataframe", "def __init__(self, message): self.message = message class NotInSeattleError(Exception): def __init__(self,", "# please how shapefiles are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self,", "self.message = message class NotInSeattleError(Exception): def __init__(self, message): self.message =", "self.message = message # msg: geodataframe has overlapping polygons representing", "please how shapefiles are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message):", "for all excpetions used in following scripts - geocoder.py -", "in following scripts - geocoder.py - geocoder_input.py \"\"\" class OverlappingGeographyError(Exception):", "geocoder.py - geocoder_input.py \"\"\" class OverlappingGeographyError(Exception): def __init__(self, message): self.message", "please how shapefiles are processed. class NoOverlapSpatialJoinError(Exception): def __init__(self, message):", "message): self.message = message class NotInSeattleError(Exception): def __init__(self, message): self.message", "used in following scripts - geocoder.py - geocoder_input.py \"\"\" class", "are processed. 
class NoOverlapSpatialJoinError(Exception): def __init__(self, message): self.message = message", "following scripts - geocoder.py - geocoder_input.py \"\"\" class OverlappingGeographyError(Exception): def", "= message # msg: geodataframe has overlapping polygons representing geographic", "shapefiles are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message): self.message =", "are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message): self.message = message", "__init__(self, message): self.message = message class NoUrbanVillageError(Exception): def __init__(self, message):", "polygons representing geographic features # please how shapefiles are processed.", "class OverlappingGeographyError(Exception): def __init__(self, message): self.message = message # msg:", "- geocoder.py - geocoder_input.py \"\"\" class OverlappingGeographyError(Exception): def __init__(self, message):", "msg: geodataframe has overlapping polygons representing geographic features # please", "representing geographic features # please how shapefiles are processed. class", "# please how shapefiles are processed. class NoOverlapSpatialJoinError(Exception): def __init__(self,", "message # msg: geodataframe has overlapping polygons representing geographic features", "message class NoUrbanVillageError(Exception): def __init__(self, message): self.message = message class", "geographic features # please how shapefiles are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError):", "features # please how shapefiles are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def", "self.message = message class NoUrbanVillageError(Exception): def __init__(self, message): self.message =", "\"\"\" class OverlappingGeographyError(Exception): def __init__(self, message): self.message = message #", "excpetions used in following scripts - geocoder.py - geocoder_input.py \"\"\"", "processed. 
class NoOverlapSpatialJoinError(Exception): def __init__(self, message): self.message = message #", "message): self.message = message class NoUrbanVillageError(Exception): def __init__(self, message): self.message", "<filename>seamo/support/seamo_exceptions.py \"\"\" Class for all excpetions used in following scripts", "geodataframe has overlapping polygons representing geographic features # please how", "OverlappingGeographyError(Exception): def __init__(self, message): self.message = message # msg: geodataframe", "NoUrbanVillageError(Exception): def __init__(self, message): self.message = message class NotInSeattleError(Exception): def", "class NoUrbanVillageError(Exception): def __init__(self, message): self.message = message class NotInSeattleError(Exception):", "def __init__(self, message): self.message = message class NoUrbanVillageError(Exception): def __init__(self,", "class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message): self.message = message class NoUrbanVillageError(Exception):", "\"\"\" Class for all excpetions used in following scripts -", "message): self.message = message # msg: geodataframe has overlapping polygons", "features # please how shapefiles are processed. class NoOverlapSpatialJoinError(Exception): def", "__init__(self, message): self.message = message class NotInSeattleError(Exception): def __init__(self, message):", "scripts - geocoder.py - geocoder_input.py \"\"\" class OverlappingGeographyError(Exception): def __init__(self,", "geographic features # please how shapefiles are processed. class NoOverlapSpatialJoinError(Exception):", "how shapefiles are processed. class NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message): self.message", "how shapefiles are processed. 
class NoOverlapSpatialJoinError(Exception): def __init__(self, message): self.message", "- geocoder_input.py \"\"\" class OverlappingGeographyError(Exception): def __init__(self, message): self.message =", "= message class NoUrbanVillageError(Exception): def __init__(self, message): self.message = message", "def __init__(self, message): self.message = message # msg: geodataframe has", "= message class NotInSeattleError(Exception): def __init__(self, message): self.message = message", "shapefiles are processed. class NoOverlapSpatialJoinError(Exception): def __init__(self, message): self.message =", "NoParkingAvailableError(NoOverlapSpatialJoinError): def __init__(self, message): self.message = message class NoUrbanVillageError(Exception): def", "__init__(self, message): self.message = message # msg: geodataframe has overlapping", "# msg: geodataframe has overlapping polygons representing geographic features #", "Class for all excpetions used in following scripts - geocoder.py", "has overlapping polygons representing geographic features # please how shapefiles" ]
[ "port = 42 s_soket = socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu", "istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi", "True): dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik", "if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'), istemciAdres) karar = s_soket.recvfrom(4096)", "dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\" + \".txt\" dosyaYeni = open(yeniAd, \"wb\")", "else: dosyaYeni = open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres)", "== \"mevcut\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\"))", "+ \".txt\" dosyaYeni = open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi", "if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\",", "= s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4] + \"", "os import sys import time from socket import * from", "== \"listeleme yap\"): dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit()", "gerceklesti..\") else: dosyaYeni = open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'),", "== True): s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'), istemciAdres) karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\")", "\".txt\" dosyaYeni = open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi basariyla", 
"cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096)", "encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi basariyla gerceklesti..\") else: print(\"\\nGirilen adda bir", "= dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\" + \".txt\" dosyaYeni = open(yeniAd,", "= 42 s_soket = socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\")", "print(\"\\nPUT islemi basariyla gerceklesti..\") else: dosyaYeni = open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0])", "* from os import system, name ip = '127.0.0.1' port", "dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya =", "dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'), istemciAdres)", "dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi basariyla gerceklesti..\") else: print(\"\\nGirilen", "istemciAdres) sys.exit() elif(i.decode(\"utf-8\") == \"put yap\"): cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\")", "= socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres =", "s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi basariyla gerceklesti..\") else: print(\"\\nGirilen adda", "mevcut değil\") sys.exit() elif(i.decode(\"utf-8\") == \"bitir\"): s_soket.close() print(\"\\nSunucu kapandi\") sys.exit()", "\"get yap\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True):", "dosyaYeni.close() print(\"\\nPUT islemi basariyla gerceklesti..\") else: dosyaYeni = open(dosyaIsmi, \"wb\")", "dosyaIcerik = dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol = s_soket.recvfrom(4096) print(\"\\nGET", "bir dosya sunucuda mevcut değil\") 
sys.exit() elif(i.decode(\"utf-8\") == \"bitir\"): s_soket.close()", "open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi basariyla gerceklesti..\") else: dosyaYeni", "gerceklesti..\") else: print(\"\\nGirilen adda bir dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\") ==", "if(i.decode(\"utf-8\") == \"listeleme yap\"): dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres)", "\"put yap\"): cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi, istemciAdres", "basariyla gerceklesti..\") else: dosyaYeni = open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\",", "= s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\")", "karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4] +", "encoding='utf-8'), istemciAdres) karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd =", "name ip = '127.0.0.1' port = 42 s_soket = socket(AF_INET,", "\" (kopya)\" + \".txt\" dosyaYeni = open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close()", "42 s_soket = socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\") kontrol,", "yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\" + \".txt\" dosyaYeni =", "sys.exit() else: print(\"\\n! 
Bu isimde bir dosya sunucuda mevcut değil\")", "'127.0.0.1' port = 42 s_soket = socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port))", "== \"put yap\"): cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi,", "yap\"): cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi, istemciAdres =", "sys.exit() elif(i.decode(\"utf-8\") == \"put yap\"): cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") ==", "islemi basariyla gerceklesti..\") else: print(\"\\nGirilen adda bir dosya istemcide bulunamadi..\")", "gerceklesti..\") sys.exit() else: print(\"\\n! Bu isimde bir dosya sunucuda mevcut", "dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol = s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla", "hazir\", encoding='utf-8'), istemciAdres) i, istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme", "= s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi", "s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'),", "else: print(\"\\n! Bu isimde bir dosya sunucuda mevcut değil\") sys.exit()", "s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme yap\"): dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'),", "= s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres) i, istemciAdres = s_soket.recvfrom(4096)", "= s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla gerceklesti..\") sys.exit() else: print(\"\\n! 
Bu", "dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol = s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla gerceklesti..\")", "s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres) i, istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\") ==", "dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True):", "s_soket.sendto(dosyaIcerik, istemciAdres) kontrol = s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla gerceklesti..\") sys.exit()", "print(\"\\nGET islemi basariyla gerceklesti..\") sys.exit() else: print(\"\\n! Bu isimde bir", "import * from os import system, name ip = '127.0.0.1'", "= open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi", "basariyla gerceklesti..\") sys.exit() else: print(\"\\n! Bu isimde bir dosya sunucuda", "s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'), istemciAdres) karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"):", "\"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi basariyla gerceklesti..\") else: dosyaYeni =", "print(\"\\n! 
Bu isimde bir dosya sunucuda mevcut değil\") sys.exit() elif(i.decode(\"utf-8\")", "#<NAME> 150401052 import os import sys import time from socket", "print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres)", "mevcut\", encoding='utf-8'), istemciAdres) karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd", "sunucuda mevcut değil\") sys.exit() elif(i.decode(\"utf-8\") == \"bitir\"): s_soket.close() print(\"\\nSunucu kapandi\")", "system, name ip = '127.0.0.1' port = 42 s_soket =", "istemciAdres) karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4]", "if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096)", "= open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi basariyla gerceklesti..\") else:", "elif(i.decode(\"utf-8\") == \"put yap\"): cevap = s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"):", "import time from socket import * from os import system,", "= open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik = dosya.read()", "print(\"\\nPUT islemi basariyla gerceklesti..\") else: print(\"\\nGirilen adda bir dosya istemcide", "encoding='utf-8'), istemciAdres) dosyaIcerik = dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol =", "isimde bir dosya sunucuda mevcut değil\") sys.exit() elif(i.decode(\"utf-8\") == \"bitir\"):", "dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\") == \"put", "150401052 import os import sys import time from socket import", "dosyaYeni = open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi basariyla 
gerceklesti..\")", "s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\"", "(kopya)\" + \".txt\" dosyaYeni = open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT", "dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik =", "istemciAdres) kontrol = s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla gerceklesti..\") sys.exit() else:", "i, istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme yap\"): dosyalar =", "import os import sys import time from socket import *", "yap\"): dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\") ==", "islemi basariyla gerceklesti..\") sys.exit() else: print(\"\\n! Bu isimde bir dosya", "import sys import time from socket import * from os", "mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik = dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol", "istemcide bulunamadi..\") elif(i.decode(\"utf-8\") == \"get yap\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096)", "time from socket import * from os import system, name", "socket import * from os import system, name ip =", "open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi basariyla", "= dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol = s_soket.recvfrom(4096) print(\"\\nGET islemi", "islemi basariyla gerceklesti..\") else: dosyaYeni = open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close()", "os import system, name ip = '127.0.0.1' port = 42", "\"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik = dosya.read() dosya.close() s_soket.sendto(dosyaIcerik,", 
"elif(i.decode(\"utf-8\") == \"get yap\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\"))", "== \"get yap\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) ==", "= s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme yap\"): dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar,", "port)) print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'),", "ip = '127.0.0.1' port = 42 s_soket = socket(AF_INET, SOCK_DGRAM)", "sys import time from socket import * from os import", "s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik = dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres)", "socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres = s_soket.recvfrom(4096)", "import system, name ip = '127.0.0.1' port = 42 s_soket", "s_soket.recvfrom(4096) if(cevap[0].decode(\"utf-8\") == \"mevcut\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi =", "dosyaYeni = open(dosyaIsmi, \"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT", "else: print(\"\\nGirilen adda bir dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\") == \"get", "== True): dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres)", "= s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'), istemciAdres) karar", "istemciAdres) i, istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme yap\"): dosyalar", "bir dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\") == \"get yap\"): dosyaIsmi, istemciAdres", "True): s_soket.sendto(bytes(\"aynisi mevcut\", 
encoding='utf-8'), istemciAdres) karar = s_soket.recvfrom(4096) if(karar[0].decode(\"utf-8\") ==", "bulunamadi..\") elif(i.decode(\"utf-8\") == \"get yap\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) if", "Hazir\\n\") kontrol, istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres) i,", "= \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\") == \"put yap\"):", "yap\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya", "\"1\"): yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\" + \".txt\" dosyaYeni", "istemciAdres) print(\"\\nPUT islemi basariyla gerceklesti..\") else: print(\"\\nGirilen adda bir dosya", "from socket import * from os import system, name ip", "SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu", "(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'),", "dosya sunucuda mevcut değil\") sys.exit() elif(i.decode(\"utf-8\") == \"bitir\"): s_soket.close() print(\"\\nSunucu", "istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme yap\"): dosyalar = \"\\n\".join(os.listdir())", "s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi mevcut\", encoding='utf-8'), istemciAdres) karar =", "= '127.0.0.1' port = 42 s_soket = socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip,", "istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres) i, istemciAdres =", "s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\") == \"put yap\"): cevap =", "if(karar[0].decode(\"utf-8\") == \"1\"): yeniAd = 
dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\" +", "s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya = open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya", "basariyla gerceklesti..\") else: print(\"\\nGirilen adda bir dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\")", "s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\",", "\"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\") == \"put yap\"): cevap", "kontrol = s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla gerceklesti..\") sys.exit() else: print(\"\\n!", "== \"1\"): yeniAd = dosyaIsmi.decode(\"utf-8\")[:-4] + \" (kopya)\" + \".txt\"", "\"wb\") dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi basariyla gerceklesti..\")", "istemciAdres) dosyaIcerik = dosya.read() dosya.close() s_soket.sendto(dosyaIcerik, istemciAdres) kontrol = s_soket.recvfrom(4096)", "= s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): s_soket.sendto(bytes(\"aynisi mevcut\",", "from os import system, name ip = '127.0.0.1' port =", "+ \" (kopya)\" + \".txt\" dosyaYeni = open(yeniAd, \"wb\") dosyaYeni.write(dosyaIcerigi[0])", "print(\"\\nGirilen adda bir dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\") == \"get yap\"):", "s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres) i, istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\")", "encoding='utf-8'), istemciAdres) i, istemciAdres = s_soket.recvfrom(4096) if(i.decode(\"utf-8\") == \"listeleme yap\"):", "open(dosyaIsmi.decode(\"utf-8\"), \"rb\") s_soket.sendto(bytes(\"dosya mevcut\", encoding='utf-8'), istemciAdres) dosyaIcerik = dosya.read() dosya.close()", 
"dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() s_soket.sendto(bytes(\"tamam\", encoding='utf-8'), istemciAdres) print(\"\\nPUT islemi basariyla gerceklesti..\") else:", "\"listeleme yap\"): dosyalar = \"\\n\".join(os.listdir()) s_soket.sendto(bytes(dosyalar, encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\")", "s_soket.recvfrom(4096) print(\"\\nGET islemi basariyla gerceklesti..\") sys.exit() else: print(\"\\n! Bu isimde", "\"mevcut\"): dosyaIsmi, istemciAdres = s_soket.recvfrom(4096) dosyaIcerigi = s_soket.recvfrom(4096) if(os.path.exists(dosyaIsmi.decode(\"utf-8\")) ==", "istemciAdres = s_soket.recvfrom(4096) if (os.path.exists(dosyaIsmi.decode(\"utf-8\")) == True): dosya = open(dosyaIsmi.decode(\"utf-8\"),", "dosyaYeni.write(dosyaIcerigi[0]) dosyaYeni.close() print(\"\\nPUT islemi basariyla gerceklesti..\") else: dosyaYeni = open(dosyaIsmi,", "kontrol, istemciAdres = s_soket.recvfrom(4096) s_soket.sendto(bytes(\"Sunucu hazir\", encoding='utf-8'), istemciAdres) i, istemciAdres", "s_soket = socket(AF_INET, SOCK_DGRAM) s_soket.bind((ip, port)) print(\"\\nSunucu Hazir\\n\") kontrol, istemciAdres", "Bu isimde bir dosya sunucuda mevcut değil\") sys.exit() elif(i.decode(\"utf-8\") ==", "adda bir dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\") == \"get yap\"): dosyaIsmi,", "dosya istemcide bulunamadi..\") elif(i.decode(\"utf-8\") == \"get yap\"): dosyaIsmi, istemciAdres =", "encoding='utf-8'), istemciAdres) sys.exit() elif(i.decode(\"utf-8\") == \"put yap\"): cevap = s_soket.recvfrom(4096)" ]
[ "custom_order = [ \"method\", \"url\", \"params\", \"headers\", \"cookies\", \"data\", \"json\",", "Dict]) -> Dict: \"\"\" convert extract list(v2) to dict(v3) Args:", "deprecated argument: --failfast\") args.pop(args.index(\"--failfast\")) # convert --report-file to --html if", "args: logger.warning( f\"generate conftest.py keep compatibility with --save-tests in v2\"", "v3_extractors: Dict = {} if isinstance(extractors, List): # [{\"varA\": \"content.varA\"},", "in v2 \"\"\" # remove deprecated --failfast if \"--failfast\" in", "extractors: if not isinstance(extractor, Dict): logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for", "test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if \"teardown_hooks\" in step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"]", "Dict) -> Dict: logger.info(\"convert api in v2 to testcase format", "test_content[\"config\"], \"teststeps\": []} if \"teststeps\" not in test_content: logger.error(f\"Miss teststeps:", "teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content def", "conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path = os.path.dirname(conftest_path) if not os.path.exists(dir_path):", "logger.info(\"ensure compatibility with testcase format v2\") v3_content = {\"config\": test_content[\"config\"],", "break else: logger.error(f\"No valid test path specified! 
\\nargs: {args}\") sys.exit(1)", "file_relative_folder_path, test_file = os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _", "cli args in v2 \"\"\" # remove deprecated --failfast if", "return sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step: Dict) -> Dict: custom_order =", "\"teardown_hooks\", \"extract\", \"validate\", \"validate_script\", ] return sort_dict_by_custom_order(step, custom_order) def _ensure_step_attachment(step:", "args def _generate_conftest_for_summary(args: List): for arg in args: if os.path.exists(arg):", "Dict: test_dict = { \"name\": step[\"name\"], } if \"variables\" in", "{\"varB\": \"json.varB\"}] Returns: {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} \"\"\" v3_extractors: Dict", "if isinstance(raw_variables, Dict): return raw_variables if isinstance(raw_variables, List): # [{\"var1\":", "_sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content def ensure_cli_args(args: List) -> List: \"\"\"", "Linux and Windows \"\"\" if \"/\" in path: path =", "def session_fixture(request): \"\"\"setup and teardown each task\"\"\" logger.info(f\"start running testcases", "= f\"{test_file_name}.summary.json\" summary_path = os.path.join(file_foder_path, dump_file_name) conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\",", "specified break else: logger.error(f\"No valid test path specified! 
\\nargs: {args}\")", "\"success\": 0, \"fail\": 0}, \"teststeps\": {\"total\": 0, \"failures\": 0, \"successes\":", "= raw_list.pop() item = f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item) return \".\".join(raw_list)", "if not isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\") #", "args[index] = \"--html\" args.append(\"--self-contained-html\") # keep compatibility with --save-tests in", "teststep validate: {step[\"validate\"]}' ) test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if \"validate_script\" in", "in step: test_dict[\"validate_script\"] = step[\"validate_script\"] return test_dict def ensure_testcase_v3_api(api_content: Dict)", "cls=ExtendJSONEncoder) logger.info(f\"generated task summary: {summary_path}\") ''' project_meta = load_project_meta(test_path) project_root_dir", "path = os.sep.join(path.split(\"/\")) if \"\\\\\" in path: path = os.sep.join(path.split(\"\\\\\"))", "{raw}\") sys.exit(1) last_item = raw_list.pop() item = f\"{last_item}[{item}]\" raw_list.append(item) else:", "return sort_dict_by_custom_order(step, custom_order) def _ensure_step_attachment(step: Dict) -> Dict: test_dict =", "compatibility with deprecated cli args in v2 \"\"\" # remove", "raw_list = [] for item in raw.split(\".\"): if \"-\" in", "ensure_testcase_v3_api(api_content: Dict) -> Dict: logger.info(\"convert api in v2 to testcase", "= {} for var_item in raw_variables: if not isinstance(var_item, Dict)", "0: logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1) last_item = raw_list.pop() item =", "\"validate\", \"validate_script\", ] return sort_dict_by_custom_order(step, custom_order) def _ensure_step_attachment(step: Dict) ->", "return v3_content def ensure_cli_args(args: List) -> List: \"\"\" ensure compatibility", "f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\" raw_list = [] for", "f\"body{raw[len('json'):]}\" raw_list = [] for 
item in raw.split(\".\"): if \"-\"", "ensure_cli_args(args: List) -> List: \"\"\" ensure compatibility with deprecated cli", "argument: --failfast\") args.pop(args.index(\"--failfast\")) # convert --report-file to --html if \"--report-file\"", "{ \"config\": config, \"teststeps\": [teststep], } def ensure_testcase_v3(test_content: Dict) ->", "\"teststeps\": {\"total\": 0, \"failures\": 0, \"successes\": 0}, }, \"time\": {\"start_at\":", "extractor.items(): v3_extractors[k] = v elif isinstance(extractors, Dict): # {\"varA\": \"body.varA\",", "v3_extractors[k] = _convert_jmespath(v) return v3_extractors def _convert_validators(validators: List) -> List:", "{test_content[\"teststeps\"]}' ) sys.exit(1) for step in test_content[\"teststeps\"]: teststep = {}", "raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return", "Dict) -> Dict: logger.info(\"ensure compatibility with testcase format v2\") v3_content", "teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep) config = {\"name\": api_content[\"name\"]} extract_variable_names: List", ") -> Dict[Text, Any]: if isinstance(raw_variables, Dict): return raw_variables if", "\"url\", \"params\", \"headers\", \"cookies\", \"data\", \"json\", \"files\", \"timeout\", \"allow_redirects\", \"proxies\",", "if \"validate_script\" in step: test_dict[\"validate_script\"] = step[\"validate_script\"] return test_dict def", "and \"expect\" in v: # format1: {\"check\": \"content.abc\", \"assert\": \"eq\",", "= extractors else: logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v", "project_meta = load_project_meta(test_path) project_root_dir = project_meta.RootDir conftest_path = os.path.join(project_root_dir, \"conftest.py\")", "\"params\", \"headers\", \"cookies\", \"data\", \"json\", \"files\", \"timeout\", 
\"allow_redirects\", \"proxies\", \"verify\",", "'''# NOTICE: Generated By HttpRunner. import json import os import", "isinstance(raw_variables, Dict): return raw_variables if isinstance(raw_variables, List): # [{\"var1\": 1},", "= testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir,", "Text, Union, Any from loguru import logger from httprunner import", "\"auth\", \"cert\", ] return sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step: Dict) ->", "{raw_variables}\" ) def _convert_jmespath(raw: Text) -> Text: if not isinstance(raw,", ") dir_path = os.path.dirname(conftest_path) if not os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path,", "api in v2 to testcase format v3\") teststep = {", "conftest_content = '''# NOTICE: Generated By HttpRunner. import json import", "and v3. 解决httprunner2 和 3 之间测试用例兼容性问题 \"\"\" import os import", "Dict, Text, Union, Any from loguru import logger from httprunner", "= v elif isinstance(extractors, Dict): # {\"varA\": \"body.varA\", \"varB\": \"body.varB\"}", "sys.exit(1) for step in test_content[\"teststeps\"]: teststep = {} if \"request\"", "if \"teststeps\" not in test_content: logger.error(f\"Miss teststeps: {test_content}\") sys.exit(1) if", "= _sort_step_by_custom_order(teststep) config = {\"name\": api_content[\"name\"]} extract_variable_names: List = list(teststep.get(\"extract\",", "= os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _ = os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\"", "${get_variables()} project_meta = load_project_meta(test_path) variables = parse_data(raw_variables, {}, project_meta.functions) return", "os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path, test_path_relative_path) dump_file_name = \"all.summary.json\" else: 
file_relative_folder_path,", "--save-tests in v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args def _generate_conftest_for_summary(args:", "raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) def _convert_jmespath(raw: Text)", "in step: test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if \"teardown_hooks\" in step: test_dict[\"teardown_hooks\"]", "\"json.varB\"}] for extractor in extractors: if not isinstance(extractor, Dict): logger.error(f\"Invalid", "-> Dict: test_dict = { \"name\": step[\"name\"], } if \"variables\"", "if \"variables\" in step: test_dict[\"variables\"] = step[\"variables\"] if \"setup_hooks\" in", "loguru import logger from httprunner.utils import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True)", "{args}\") sys.exit(1) conftest_content = '''# NOTICE: Generated By HttpRunner. import", "\"testcase\" in step: teststep[\"testcase\"] = step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid teststep:", "time.time() yield logger.info(f\"task finished, generate task summary for --save-tests\") summary", "jmespath: {raw}\") sys.exit(1) last_item = raw_list.pop() item = f\"{last_item}[{item}]\" raw_list.append(item)", "{'eq': ['status_code', 201]} comparator = list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0]) return", "extractors: [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] Returns: {\"varA\": \"body.varA\", \"varB\": \"body.varB\"}", "import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request): \"\"\"setup and teardown", "sys.exit(1) for k, v in v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v) return", "Text: if not isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\")", "custom_order) def _sort_step_by_custom_order(step: Dict) -> Dict: custom_order = [ \"name\",", "1 
testcase_summary_json = testcase_summary.dict() testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path =", "convert extract list(v2) to dict(v3) Args: extractors: [{\"varA\": \"content.varA\"}, {\"varB\":", "sys.exit(1) if not isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps should be list", "args.index(\"--report-file\") args[index] = \"--html\" args.append(\"--self-contained-html\") # keep compatibility with --save-tests", "as f: json.dump(summary, f, indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task summary:", "\"all.summary.json\" else: file_relative_folder_path, test_file = os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path)", "dump_file_name = f\"{test_file_name}.summary.json\" summary_path = os.path.join(file_foder_path, dump_file_name) conftest_content = conftest_content.replace(", "exceptions from httprunner.loader import load_project_meta, convert_relative_project_root_dir from httprunner.parser import parse_data", "v3_content[\"teststeps\"].append(teststep) return v3_content def ensure_cli_args(args: List) -> List: \"\"\" ensure", "{\"var2\": 2}] variables: Dict[Text, Any] = {} for var_item in", "project_root_dir = project_meta.RootDir conftest_path = os.path.join(project_root_dir, \"conftest.py\") test_path = os.path.abspath(test_path)", "start_at = time.time() yield logger.info(f\"task finished, generate task summary for", "@pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request): \"\"\"setup and teardown each task\"\"\" logger.info(f\"start", ") test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if \"validate_script\" in step: test_dict[\"validate_script\"] =", "= [] for item in raw.split(\".\"): if \"-\" in item:", "step: teststep[\"testcase\"] = step.pop(\"api\") elif \"testcase\" in step: teststep[\"testcase\"] 
=", "def ensure_testcase_v3_api(api_content: Dict) -> Dict: logger.info(\"convert api in v2 to", "Union[Dict, List, Text], test_path: Text ) -> Dict[Text, Any]: if", "raw_list.append(f'\"{item}\"') elif item.isdigit(): # convert lst.0.name to lst[0].name if len(raw_list)", "summary.json\") def ensure_path_sep(path: Text) -> Text: \"\"\" ensure compatibility with", "f\"Invalid variables format: {raw_variables}\" ) variables.update(var_item) return variables elif isinstance(raw_variables,", "request.node.items: testcase_summary = item.instance.get_summary() summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1", "item: # add quotes for field with separator # e.g.", "elif \"api\" in step: teststep[\"testcase\"] = step.pop(\"api\") elif \"testcase\" in", "if not isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError( f'Invalid teststep validate: {step[\"validate\"]}'", "len( testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += (", "_sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep) config = {\"name\": api_content[\"name\"]}", "os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path, \"w\", encoding=\"utf-8\") as f: f.write(conftest_content) logger.info(\"generated", "item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit(): # convert lst.0.name to lst[0].name if", "Any from loguru import logger from httprunner import exceptions from", "in args: if os.path.exists(arg): test_path = arg # FIXME: several", "{\"check\": \"content.abc\", \"assert\": \"eq\", \"expect\": 201} v[\"check\"] = _convert_jmespath(v[\"check\"]) elif", "with --save-tests in v2 if \"--save-tests\" in args: logger.warning( f\"generate", "open(summary_path, \"w\", encoding=\"utf-8\") as f: json.dump(summary, f, indent=4, 
ensure_ascii=False, cls=ExtendJSONEncoder)", "os.path.join(file_foder_path, dump_file_name) conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path =", "validators def _sort_request_by_custom_order(request: Dict) -> Dict: custom_order = [ \"method\",", "1: # format2: {'eq': ['status_code', 201]} comparator = list(v.keys())[0] v[comparator][0]", "format1: {\"check\": \"content.abc\", \"assert\": \"eq\", \"expect\": 201} v[\"check\"] = _convert_jmespath(v[\"check\"])", "}, \"time\": {\"start_at\": start_at, \"duration\": time.time() - start_at}, \"platform\": get_platform(),", "e.g. ${get_variables()} project_meta = load_project_meta(test_path) variables = parse_data(raw_variables, {}, project_meta.functions)", "\"success\": True, \"stat\": { \"testcases\": {\"total\": 0, \"success\": 0, \"fail\":", "extractor: {extractors}\") sys.exit(1) for k, v in extractor.items(): v3_extractors[k] =", "sys.exit(1) for k, v in extractor.items(): v3_extractors[k] = v elif", "summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if", "format: {raw_variables}\" ) def _convert_jmespath(raw: Text) -> Text: if not", "v[\"check\"] = _convert_jmespath(v[\"check\"]) elif len(v) == 1: # format2: {'eq':", "+= len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len(", "\"\"\" # remove deprecated --failfast if \"--failfast\" in args: logger.warning(f\"remove", "conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path = os.path.dirname(conftest_path) if", "\"expect\": 201} v[\"check\"] = _convert_jmespath(v[\"check\"]) elif len(v) == 1: #", "1}, {\"var2\": 2}] variables: Dict[Text, Any] = {} for var_item", "\"stat\": { \"testcases\": {\"total\": 0, \"success\": 0, 
\"fail\": 0}, \"teststeps\":", "step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in step: teststep[\"testcase\"] =", "os.path.dirname(conftest_path) if not os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path, \"w\", encoding=\"utf-8\") as", "-> Dict: custom_order = [ \"name\", \"variables\", \"request\", \"testcase\", \"setup_hooks\",", "def _sort_request_by_custom_order(request: Dict) -> Dict: custom_order = [ \"method\", \"url\",", "Union, Any from loguru import logger from httprunner import exceptions", "= _convert_validators(step[\"validate\"]) if \"validate_script\" in step: test_dict[\"validate_script\"] = step[\"validate_script\"] return", "r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with open(summary_path, \"w\", encoding=\"utf-8\")", "{\"start_at\": start_at, \"duration\": time.time() - start_at}, \"platform\": get_platform(), \"details\": [],", "This module handles compatibility issues between testcase format v2 and", "for --save-tests\") summary = { \"success\": True, \"stat\": { \"testcases\":", "-> Text: if not isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor:", "else: raw_list.append(item) return \".\".join(raw_list) def _convert_extractors(extractors: Union[List, Dict]) -> Dict:", "not os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path, \"w\", encoding=\"utf-8\") as f: f.write(conftest_content)", "\"\"\"setup and teardown each task\"\"\" logger.info(f\"start running testcases ...\") start_at", "format2: {'eq': ['status_code', 201]} comparator = list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0])", "{type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1) for step in test_content[\"teststeps\"]: teststep =", "for v in validators: if \"check\" in v and \"expect\"", "args: logger.warning(f\"remove deprecated 
argument: --failfast\") args.pop(args.index(\"--failfast\")) # convert --report-file to", "= \"--html\" args.append(\"--self-contained-html\") # keep compatibility with --save-tests in v2", "def ensure_path_sep(path: Text) -> Text: \"\"\" ensure compatibility with different", "_convert_jmespath(v[comparator][0]) return validators def _sort_request_by_custom_order(request: Dict) -> Dict: custom_order =", "\\nargs: {args}\") sys.exit(1) conftest_content = '''# NOTICE: Generated By HttpRunner.", "\"\"\" ensure compatibility with different path separators of Linux and", "config = {\"name\": api_content[\"name\"]} extract_variable_names: List = list(teststep.get(\"extract\", {}).keys()) if", "\"body.varA\", \"varB\": \"body.varB\"} \"\"\" v3_extractors: Dict = {} if isinstance(extractors,", "in step: teststep[\"testcase\"] = step.pop(\"api\") elif \"testcase\" in step: teststep[\"testcase\"]", "load_project_meta, convert_relative_project_root_dir from httprunner.parser import parse_data from httprunner.utils import sort_dict_by_custom_order", "module handles compatibility issues between testcase format v2 and v3.", "Dict: logger.info(\"convert api in v2 to testcase format v3\") teststep", "step.pop(\"api\") elif \"testcase\" in step: teststep[\"testcase\"] = step.pop(\"testcase\") else: raise", "with --save-tests in v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args def", "\"teardown_hooks\" in step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if \"extract\" in step:", "convert_relative_project_root_dir from httprunner.parser import parse_data from httprunner.utils import sort_dict_by_custom_order def", "{ \"success\": True, \"stat\": { \"testcases\": {\"total\": 0, \"success\": 0,", "import List, Dict, Text, Union, Any from loguru import logger", "= os.path.join(project_root_dir, \"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path =", 
"\"\"\" import os import sys from typing import List, Dict,", "headers.\"Content-Type\" item = item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit(): # convert lst.0.name", "ensure compatibility with different path separators of Linux and Windows", "os.path.exists(arg): test_path = arg # FIXME: several test paths maybe", "== 1: # format2: {'eq': ['status_code', 201]} comparator = list(v.keys())[0]", "variables: Dict[Text, Any] = {} for var_item in raw_variables: if", "logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1) last_item = raw_list.pop() item = f\"{last_item}[{item}]\"", "_sort_step_by_custom_order(step: Dict) -> Dict: custom_order = [ \"name\", \"variables\", \"request\",", "[ \"method\", \"url\", \"params\", \"headers\", \"cookies\", \"data\", \"json\", \"files\", \"timeout\",", "sys.exit(1) last_item = raw_list.pop() item = f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item)", "\"method\", \"url\", \"params\", \"headers\", \"cookies\", \"data\", \"json\", \"files\", \"timeout\", \"allow_redirects\",", "isinstance(var_item, Dict) or len(var_item) != 1: raise exceptions.TestCaseFormatError( f\"Invalid variables", "Text) -> Text: if not isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath", "= args.index(\"--report-file\") args[index] = \"--html\" args.append(\"--self-contained-html\") # keep compatibility with", "List, Dict, Text, Union, Any from loguru import logger from", ") else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas) -", "raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) variables.update(var_item) return variables", "if os.path.exists(arg): test_path = arg # FIXME: several test paths", "finished, generate task summary for --save-tests\") summary = { \"success\":", "args.pop(args.index(\"--failfast\")) # convert --report-file to --html if \"--report-file\" 
in args:", "args: logger.warning(f\"replace deprecated argument --report-file with --html\") index = args.index(\"--report-file\")", "['status_code', 201]} comparator = list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0]) return validators", "summary_path ) dir_path = os.path.dirname(conftest_path) if not os.path.exists(dir_path): os.makedirs(dir_path) with", "logger from httprunner import exceptions from httprunner.loader import load_project_meta, convert_relative_project_root_dir", "os.path.join(project_root_dir, \"conftest.py\") test_path = os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir, \"logs\") test_path_relative_path", "in extractors: if not isinstance(extractor, Dict): logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1)", "= f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item) return \".\".join(raw_list) def _convert_extractors(extractors: Union[List,", "summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with open(summary_path, \"w\", encoding=\"utf-8\") as", "_convert_jmespath(raw: Text) -> Text: if not isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid", "exist_ok=True) with open(summary_path, \"w\", encoding=\"utf-8\") as f: json.dump(summary, f, indent=4,", "step: test_dict[\"variables\"] = step[\"variables\"] if \"setup_hooks\" in step: test_dict[\"setup_hooks\"] =", ") variables.update(var_item) return variables elif isinstance(raw_variables, Text): # get variables", "# FIXME: several test paths maybe specified break else: logger.error(f\"No", "step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep)", "file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _ = os.path.splitext(test_file) dump_file_name =", "= step[\"variables\"] if \"setup_hooks\" in 
step: test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if", "0}, \"teststeps\": {\"total\": 0, \"failures\": 0, \"successes\": 0}, }, \"time\":", "in path: path = os.sep.join(path.split(\"/\")) if \"\\\\\" in path: path", "v: # format1: {\"check\": \"content.abc\", \"assert\": \"eq\", \"expect\": 201} v[\"check\"]", "valid test path specified! \\nargs: {args}\") sys.exit(1) conftest_content = '''#", "\"timeout\", \"allow_redirects\", \"proxies\", \"verify\", \"stream\", \"auth\", \"cert\", ] return sort_dict_by_custom_order(request,", "in step: test_dict[\"variables\"] = step[\"variables\"] if \"setup_hooks\" in step: test_dict[\"setup_hooks\"]", "\"allow_redirects\", \"proxies\", \"verify\", \"stream\", \"auth\", \"cert\", ] return sort_dict_by_custom_order(request, custom_order)", "\"validate\" in step: if not isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError( f'Invalid", "= [ \"name\", \"variables\", \"request\", \"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\",", "= [ \"method\", \"url\", \"params\", \"headers\", \"cookies\", \"data\", \"json\", \"files\",", "test_dict[\"export\"] = step[\"export\"] if \"validate\" in step: if not isinstance(step[\"validate\"],", "= { \"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep) config", "get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request): \"\"\"setup and teardown each", "exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) def _convert_jmespath(raw: Text) ->", "\"json.varB\"}] Returns: {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} \"\"\" v3_extractors: Dict =", "test_dict = { \"name\": step[\"name\"], } if \"variables\" in step:", "not isinstance(extractor, Dict): logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v", 
"headers.Content-Type => headers.\"Content-Type\" item = item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit(): #", "elif len(v) == 1: # format2: {'eq': ['status_code', 201]} comparator", "test_dict[\"validate_script\"] = step[\"validate_script\"] return test_dict def ensure_testcase_v3_api(api_content: Dict) -> Dict:", "start_at, \"duration\": time.time() - start_at}, \"platform\": get_platform(), \"details\": [], }", "\"body.varB\"} \"\"\" v3_extractors: Dict = {} if isinstance(extractors, List): #", "exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content", "Dict[Text, Any]: if isinstance(raw_variables, Dict): return raw_variables if isinstance(raw_variables, List):", "task summary: {summary_path}\") ''' project_meta = load_project_meta(test_path) project_root_dir = project_meta.RootDir", "{ \"testcases\": {\"total\": 0, \"success\": 0, \"fail\": 0}, \"teststeps\": {\"total\":", "Dict): return raw_variables if isinstance(raw_variables, List): # [{\"var1\": 1}, {\"var2\":", "# remove deprecated --failfast if \"--failfast\" in args: logger.warning(f\"remove deprecated", "return { \"config\": config, \"teststeps\": [teststep], } def ensure_testcase_v3(test_content: Dict)", "\"name\": step[\"name\"], } if \"variables\" in step: test_dict[\"variables\"] = step[\"variables\"]", "logger.info(\"generated conftest.py to generate summary.json\") def ensure_path_sep(path: Text) -> Text:", "encoding=\"utf-8\") as f: f.write(conftest_content) logger.info(\"generated conftest.py to generate summary.json\") def", "raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\" raw_list = [] for item in", "len(testcase_summary.step_datas) - 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json = testcase_summary.dict()", "extract_variable_names return { \"config\": config, 
\"teststeps\": [teststep], } def ensure_testcase_v3(test_content:", "if not isinstance(extractor, Dict): logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k,", "{} if \"request\" in step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\"", "_convert_validators(step[\"validate\"]) if \"validate_script\" in step: test_dict[\"validate_script\"] = step[\"validate_script\"] return test_dict", "] return sort_dict_by_custom_order(step, custom_order) def _ensure_step_attachment(step: Dict) -> Dict: test_dict", "\"content.varA\"}, {\"varB\": \"json.varB\"}] Returns: {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} \"\"\" v3_extractors:", "-> Dict: logger.info(\"ensure compatibility with testcase format v2\") v3_content =", "= _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content def ensure_cli_args(args: List) -> List:", "task summary for --save-tests\") summary = { \"success\": True, \"stat\":", "return test_dict def ensure_testcase_v3_api(api_content: Dict) -> Dict: logger.info(\"convert api in", "v and \"expect\" in v: # format1: {\"check\": \"content.abc\", \"assert\":", "os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir, \"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path) if os.path.isdir(test_path):", "Args: extractors: [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] Returns: {\"varA\": \"body.varA\", \"varB\":", "isinstance(extractors, Dict): # {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} v3_extractors = extractors", "generate summary.json\") def ensure_path_sep(path: Text) -> Text: \"\"\" ensure compatibility", "validators: if \"check\" in v and \"expect\" in v: #", "variables by function, e.g. 
${get_variables()} project_meta = load_project_meta(test_path) variables =", "running testcases ...\") start_at = time.time() yield logger.info(f\"task finished, generate", "return validators def _sort_request_by_custom_order(request: Dict) -> Dict: custom_order = [", "testcases ...\") start_at = time.time() yield logger.info(f\"task finished, generate task", "+= 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"] +=", "test_path_relative_path) dump_file_name = \"all.summary.json\" else: file_relative_folder_path, test_file = os.path.split(test_path_relative_path) file_foder_path", "if os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path, test_path_relative_path) dump_file_name = \"all.summary.json\" else:", "os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _ = os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\" summary_path", "summary: {summary_path}\") ''' project_meta = load_project_meta(test_path) project_root_dir = project_meta.RootDir conftest_path", "''' project_meta = load_project_meta(test_path) project_root_dir = project_meta.RootDir conftest_path = os.path.join(project_root_dir,", "variables format: {raw_variables}\" ) variables.update(var_item) return variables elif isinstance(raw_variables, Text):", "body.xx if raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw =", "\".\".join(raw_list) def _convert_extractors(extractors: Union[List, Dict]) -> Dict: \"\"\" convert extract", "path specified! \\nargs: {args}\") sys.exit(1) conftest_content = '''# NOTICE: Generated", "isinstance(raw_variables, Text): # get variables by function, e.g. 
${get_variables()} project_meta", "v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v) return v3_extractors def _convert_validators(validators: List) ->", "\"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path = os.path.dirname(conftest_path) if not os.path.exists(dir_path): os.makedirs(dir_path)", "[] for item in raw.split(\".\"): if \"-\" in item: #", "{raw}\") # content.xx/json.xx => body.xx if raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\"", "\"variables\", \"request\", \"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\", \"validate_script\", ] return", "List = list(teststep.get(\"extract\", {}).keys()) if extract_variable_names: config[\"export\"] = extract_variable_names return", "\"--failfast\" in args: logger.warning(f\"remove deprecated argument: --failfast\") args.pop(args.index(\"--failfast\")) # convert", "} def ensure_testcase_v3(test_content: Dict) -> Dict: logger.info(\"ensure compatibility with testcase", "-> Dict: custom_order = [ \"method\", \"url\", \"params\", \"headers\", \"cookies\",", "{\"config\": test_content[\"config\"], \"teststeps\": []} if \"teststeps\" not in test_content: logger.error(f\"Miss", "elif \"testcase\" in step: teststep[\"testcase\"] = step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid", "from httprunner.parser import parse_data from httprunner.utils import sort_dict_by_custom_order def convert_variables(", "to lst[0].name if len(raw_list) == 0: logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1)", "= os.path.join(file_foder_path, dump_file_name) conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path", "with open(conftest_path, \"w\", encoding=\"utf-8\") as f: f.write(conftest_content) logger.info(\"generated conftest.py to", "exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) variables.update(var_item) return variables elif", "[{\"varA\": \"content.varA\"}, 
{\"varB\": \"json.varB\"}] for extractor in extractors: if not", "in item: # add quotes for field with separator #", "def ensure_testcase_v3(test_content: Dict) -> Dict: logger.info(\"ensure compatibility with testcase format", "step[\"name\"], } if \"variables\" in step: test_dict[\"variables\"] = step[\"variables\"] if", "test_path = os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir, \"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path)", "\"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\", \"validate_script\", ] return sort_dict_by_custom_order(step, custom_order)", "} for item in request.node.items: testcase_summary = item.instance.get_summary() summary[\"success\"] &=", "in args: logger.warning(f\"replace deprecated argument --report-file with --html\") index =", "= convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path, test_path_relative_path) dump_file_name =", "arg in args: if os.path.exists(arg): test_path = arg # FIXME:", "in test_content: logger.error(f\"Miss teststeps: {test_content}\") sys.exit(1) if not isinstance(test_content[\"teststeps\"], list):", "in step: if not isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError( f'Invalid teststep", "--save-tests in v2 if \"--save-tests\" in args: logger.warning( f\"generate conftest.py", "_generate_conftest_for_summary(args) return args def _generate_conftest_for_summary(args: List): for arg in args:", "dump_file_name = \"all.summary.json\" else: file_relative_folder_path, test_file = os.path.split(test_path_relative_path) file_foder_path =", "logs_dir_path = os.path.join(project_root_dir, \"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path", "\"extract\", \"validate\", \"validate_script\", ] return sort_dict_by_custom_order(step, custom_order) def 
_ensure_step_attachment(step: Dict)", "exceptions.TestCaseFormatError( f'Invalid teststep validate: {step[\"validate\"]}' ) test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if", "def _convert_jmespath(raw: Text) -> Text: if not isinstance(raw, Text): raise", "if \"-\" in item: # add quotes for field with", "f\"{test_file_name}.summary.json\" summary_path = os.path.join(file_foder_path, dump_file_name) conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path", "step in test_content[\"teststeps\"]: teststep = {} if \"request\" in step:", "in args: logger.warning(f\"remove deprecated argument: --failfast\") args.pop(args.index(\"--failfast\")) # convert --report-file", "test_file_name, _ = os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\" summary_path = os.path.join(file_foder_path,", "\"-\" in item: # add quotes for field with separator", "\"--report-file\" in args: logger.warning(f\"replace deprecated argument --report-file with --html\") index", "Dict) -> Dict: custom_order = [ \"name\", \"variables\", \"request\", \"testcase\",", "import os import sys from typing import List, Dict, Text,", "if not os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path, \"w\", encoding=\"utf-8\") as f:", "\"testcases\": {\"total\": 0, \"success\": 0, \"fail\": 0}, \"teststeps\": {\"total\": 0,", "v3_extractors def _convert_validators(validators: List) -> List: for v in validators:", "from httprunner import exceptions from httprunner.loader import load_project_meta, convert_relative_project_root_dir from", "\"w\", encoding=\"utf-8\") as f: f.write(conftest_content) logger.info(\"generated conftest.py to generate summary.json\")", "\"duration\": time.time() - start_at}, \"platform\": get_platform(), \"details\": [], } for", "summary = { \"success\": True, \"stat\": { \"testcases\": {\"total\": 0,", "step: test_dict[\"validate_script\"] = step[\"validate_script\"] return test_dict def 
ensure_testcase_v3_api(api_content: Dict) ->", "between testcase format v2 and v3. 解决httprunner2 和 3 之间测试用例兼容性问题", "in step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if \"export\" in step: test_dict[\"export\"]", "= _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in step: teststep[\"testcase\"] = step.pop(\"api\") elif", "specified! \\nargs: {args}\") sys.exit(1) conftest_content = '''# NOTICE: Generated By", "if \"export\" in step: test_dict[\"export\"] = step[\"export\"] if \"validate\" in", "\"headers\", \"cookies\", \"data\", \"json\", \"files\", \"timeout\", \"allow_redirects\", \"proxies\", \"verify\", \"stream\",", "\"files\", \"timeout\", \"allow_redirects\", \"proxies\", \"verify\", \"stream\", \"auth\", \"cert\", ] return", "= extract_variable_names return { \"config\": config, \"teststeps\": [teststep], } def", "{\"varA\": \"body.varA\", \"varB\": \"body.varB\"} v3_extractors = extractors else: logger.error(f\"Invalid extractor:", "import sys from typing import List, Dict, Text, Union, Any", "test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if \"validate_script\" in step: test_dict[\"validate_script\"] = step[\"validate_script\"]", "deprecated argument --report-file with --html\") index = args.index(\"--report-file\") args[index] =", "and teardown each task\"\"\" logger.info(f\"start running testcases ...\") start_at =", "to --html if \"--report-file\" in args: logger.warning(f\"replace deprecated argument --report-file", "summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"]", "<filename>httprunner/compat.py<gh_stars>0 \"\"\" This module handles compatibility issues between testcase format", "\"\"\" v3_extractors: Dict = {} if isinstance(extractors, List): # [{\"varA\":", "Dict: custom_order = [ \"name\", \"variables\", \"request\", \"testcase\", \"setup_hooks\", 
\"teardown_hooks\",", "isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError( f'Invalid teststep validate: {step[\"validate\"]}' ) test_dict[\"validate\"]", "-> Dict: logger.info(\"convert api in v2 to testcase format v3\")", "testcase_summary.dict() testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir =", "extract_variable_names: List = list(teststep.get(\"extract\", {}).keys()) if extract_variable_names: config[\"export\"] = extract_variable_names", "[{\"var1\": 1}, {\"var2\": 2}] variables: Dict[Text, Any] = {} for", "with different path separators of Linux and Windows \"\"\" if", "if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas )", "\"body.varB\"} v3_extractors = extractors else: logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for", "httprunner.utils import sort_dict_by_custom_order def convert_variables( raw_variables: Union[Dict, List, Text], test_path:", "if \"setup_hooks\" in step: test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if \"teardown_hooks\" in", "os import sys from typing import List, Dict, Text, Union,", "path: path = os.sep.join(path.split(\"/\")) if \"\\\\\" in path: path =", "by function, e.g. ${get_variables()} project_meta = load_project_meta(test_path) variables = parse_data(raw_variables,", "with separator # e.g. 
headers.Content-Type => headers.\"Content-Type\" item = item.strip('\"')", "\"successes\": 0}, }, \"time\": {\"start_at\": start_at, \"duration\": time.time() - start_at},", "validate: {step[\"validate\"]}' ) test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if \"validate_script\" in step:", "in raw_variables: if not isinstance(var_item, Dict) or len(var_item) != 1:", "os.sep.join(path.split(\"/\")) if \"\\\\\" in path: path = os.sep.join(path.split(\"\\\\\")) return path", "os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with open(summary_path, \"w\", encoding=\"utf-8\") as f: json.dump(summary,", "[ \"name\", \"variables\", \"request\", \"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\", \"validate_script\",", "format v2\") v3_content = {\"config\": test_content[\"config\"], \"teststeps\": []} if \"teststeps\"", "content.xx/json.xx => body.xx if raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"):", "last_item = raw_list.pop() item = f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item) return", "if \"teardown_hooks\" in step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if \"extract\" in", "deprecated --failfast if \"--failfast\" in args: logger.warning(f\"remove deprecated argument: --failfast\")", "v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args def _generate_conftest_for_summary(args: List): for", "item.instance.get_summary() summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas)", "parse_data from httprunner.utils import sort_dict_by_custom_order def convert_variables( raw_variables: Union[Dict, List,", "config, \"teststeps\": [teststep], } def ensure_testcase_v3(test_content: Dict) -> Dict: logger.info(\"ensure", "if not 
isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps should be list type,", "= load_project_meta(test_path) project_root_dir = project_meta.RootDir conftest_path = os.path.join(project_root_dir, \"conftest.py\") test_path", "= f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\" raw_list = []", "[], } for item in request.node.items: testcase_summary = item.instance.get_summary() summary[\"success\"]", "v2 to testcase format v3\") teststep = { \"request\": _sort_request_by_custom_order(api_content[\"request\"]),", "add quotes for field with separator # e.g. headers.Content-Type =>", "from loguru import logger from httprunner import exceptions from httprunner.loader", "By HttpRunner. import json import os import time import pytest", "typing import List, Dict, Text, Union, Any from loguru import", "k, v in extractor.items(): v3_extractors[k] = v elif isinstance(extractors, Dict):", "&= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if testcase_summary.success:", "extractor: {raw}\") # content.xx/json.xx => body.xx if raw.startswith(\"content\"): raw =", "separators of Linux and Windows \"\"\" if \"/\" in path:", "file_foder_path = os.path.join(logs_dir_path, test_path_relative_path) dump_file_name = \"all.summary.json\" else: file_relative_folder_path, test_file", "def ensure_cli_args(args: List) -> List: \"\"\" ensure compatibility with deprecated", "import load_project_meta, convert_relative_project_root_dir from httprunner.parser import parse_data from httprunner.utils import", "201]} comparator = list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0]) return validators def", "extractor: {extractors}\") sys.exit(1) for k, v in v3_extractors.items(): v3_extractors[k] =", "in step: test_dict[\"export\"] = step[\"export\"] if \"validate\" in step: if", "= { \"success\": True, 
\"stat\": { \"testcases\": {\"total\": 0, \"success\":", "1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1", "if \"--report-file\" in args: logger.warning(f\"replace deprecated argument --report-file with --html\")", "indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task summary: {summary_path}\") ''' project_meta =", "if \"check\" in v and \"expect\" in v: # format1:", "if not isinstance(var_item, Dict) or len(var_item) != 1: raise exceptions.TestCaseFormatError(", "= os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _ = os.path.splitext(test_file)", "conftest.py keep compatibility with --save-tests in v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args)", "\"api\" in step: teststep[\"testcase\"] = step.pop(\"api\") elif \"testcase\" in step:", "+= 1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1", "logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v in extractor.items(): v3_extractors[k]", "raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\" raw_list", "_convert_validators(validators: List) -> List: for v in validators: if \"check\"", "raw = f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\" raw_list =", "v[comparator][0] = _convert_jmespath(v[comparator][0]) return validators def _sort_request_by_custom_order(request: Dict) -> Dict:", "\"failures\": 0, \"successes\": 0}, }, \"time\": {\"start_at\": start_at, \"duration\": time.time()", "testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" 
summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True)", "project_meta = load_project_meta(test_path) variables = parse_data(raw_variables, {}, project_meta.functions) return variables", "\"validate_script\", ] return sort_dict_by_custom_order(step, custom_order) def _ensure_step_attachment(step: Dict) -> Dict:", "testcase format v2 and v3. 解决httprunner2 和 3 之间测试用例兼容性问题 \"\"\"", "= {} if \"request\" in step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif", "issues between testcase format v2 and v3. 解决httprunner2 和 3", "= step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep =", "convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path, test_path_relative_path) dump_file_name = \"all.summary.json\"", "summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas) - 1 )", "3 之间测试用例兼容性问题 \"\"\" import os import sys from typing import", "test_path_relative_path = convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path, test_path_relative_path) dump_file_name", "if \"/\" in path: path = os.sep.join(path.split(\"/\")) if \"\\\\\" in", "dump_file_name) conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path = os.path.dirname(conftest_path)", "raw_variables: Union[Dict, List, Text], test_path: Text ) -> Dict[Text, Any]:", "teststep = {} if \"request\" in step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\"))", "_generate_conftest_for_summary(args: List): for arg in args: if os.path.exists(arg): test_path =", "len(var_item) != 1: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" )", "= load_project_meta(test_path) 
variables = parse_data(raw_variables, {}, project_meta.functions) return variables else:", "Returns: {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} \"\"\" v3_extractors: Dict = {}", "# format2: {'eq': ['status_code', 201]} comparator = list(v.keys())[0] v[comparator][0] =", "# convert lst.0.name to lst[0].name if len(raw_list) == 0: logger.error(f\"Invalid", "\"verify\", \"stream\", \"auth\", \"cert\", ] return sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step:", "\"teststeps\": []} if \"teststeps\" not in test_content: logger.error(f\"Miss teststeps: {test_content}\")", "for step in test_content[\"teststeps\"]: teststep = {} if \"request\" in", "step[\"setup_hooks\"] if \"teardown_hooks\" in step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if \"extract\"", "f: f.write(conftest_content) logger.info(\"generated conftest.py to generate summary.json\") def ensure_path_sep(path: Text)", "= testcase_summary.dict() testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir", "- 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json = testcase_summary.dict() testcase_summary_json[\"records\"]", "= {} if isinstance(extractors, List): # [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}]", "def convert_variables( raw_variables: Union[Dict, List, Text], test_path: Text ) ->", "{\"total\": 0, \"failures\": 0, \"successes\": 0}, }, \"time\": {\"start_at\": start_at,", "elif item.isdigit(): # convert lst.0.name to lst[0].name if len(raw_list) ==", "args.append(\"--self-contained-html\") # keep compatibility with --save-tests in v2 if \"--save-tests\"", "sort_dict_by_custom_order(step, custom_order) def _ensure_step_attachment(step: Dict) -> Dict: test_dict = {", "logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v in v3_extractors.items(): 
v3_extractors[k]", "{ \"name\": step[\"name\"], } if \"variables\" in step: test_dict[\"variables\"] =", "\"cert\", ] return sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step: Dict) -> Dict:", "- start_at}, \"platform\": get_platform(), \"details\": [], } for item in", "elif raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\" raw_list = [] for item", "{ \"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep) config =", "v2 if \"--save-tests\" in args: logger.warning( f\"generate conftest.py keep compatibility", "len(raw_list) == 0: logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1) last_item = raw_list.pop()", "{summary_path}\") ''' project_meta = load_project_meta(test_path) project_root_dir = project_meta.RootDir conftest_path =", "for k, v in v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v) return v3_extractors", "test_path: Text ) -> Dict[Text, Any]: if isinstance(raw_variables, Dict): return", "testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir = os.path.dirname(summary_path)", "file_relative_folder_path) test_file_name, _ = os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\" summary_path =", "load_project_meta(test_path) variables = parse_data(raw_variables, {}, project_meta.functions) return variables else: raise", "+= len( testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] +=", "compatibility issues between testcase format v2 and v3. 
解决httprunner2 和", "test_content[\"teststeps\"]: teststep = {} if \"request\" in step: teststep[\"request\"] =", "summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json = testcase_summary.dict() testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json)", "ensure_testcase_v3(test_content: Dict) -> Dict: logger.info(\"ensure compatibility with testcase format v2\")", "len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas", "not isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError( f'Invalid teststep validate: {step[\"validate\"]}' )", "format v2 and v3. 解决httprunner2 和 3 之间测试用例兼容性问题 \"\"\" import", "in test_content[\"teststeps\"]: teststep = {} if \"request\" in step: teststep[\"request\"]", "teststep = _sort_step_by_custom_order(teststep) config = {\"name\": api_content[\"name\"]} extract_variable_names: List =", "handles compatibility issues between testcase format v2 and v3. 
解决httprunner2", "comparator = list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0]) return validators def _sort_request_by_custom_order(request:", "dict(v3) Args: extractors: [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] Returns: {\"varA\": \"body.varA\",", "--html\") index = args.index(\"--report-file\") args[index] = \"--html\" args.append(\"--self-contained-html\") # keep", "isinstance(raw_variables, List): # [{\"var1\": 1}, {\"var2\": 2}] variables: Dict[Text, Any]", "= list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0]) return validators def _sort_request_by_custom_order(request: Dict)", "in extractor.items(): v3_extractors[k] = v elif isinstance(extractors, Dict): # {\"varA\":", "\"teststeps\" not in test_content: logger.error(f\"Miss teststeps: {test_content}\") sys.exit(1) if not", "{}).keys()) if extract_variable_names: config[\"export\"] = extract_variable_names return { \"config\": config,", "test paths maybe specified break else: logger.error(f\"No valid test path", "-> List: \"\"\" ensure compatibility with deprecated cli args in", "\"content.varA\"}, {\"varB\": \"json.varB\"}] for extractor in extractors: if not isinstance(extractor,", "{\"total\": 0, \"success\": 0, \"fail\": 0}, \"teststeps\": {\"total\": 0, \"failures\":", "= '''# NOTICE: Generated By HttpRunner. 
import json import os", "-> List: for v in validators: if \"check\" in v", "list(v.keys())[0] v[comparator][0] = _convert_jmespath(v[comparator][0]) return validators def _sort_request_by_custom_order(request: Dict) ->", "task\"\"\" logger.info(f\"start running testcases ...\") start_at = time.time() yield logger.info(f\"task", "custom_order = [ \"name\", \"variables\", \"request\", \"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\",", "encoding=\"utf-8\") as f: json.dump(summary, f, indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task", "or len(var_item) != 1: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\"", "extract_variable_names: config[\"export\"] = extract_variable_names return { \"config\": config, \"teststeps\": [teststep],", "testcase_summary = item.instance.get_summary() summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"]", "test_dict def ensure_testcase_v3_api(api_content: Dict) -> Dict: logger.info(\"convert api in v2", "list): logger.error( f'teststeps should be list type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}'", "testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas ) else:", "= _convert_jmespath(v[comparator][0]) return validators def _sort_request_by_custom_order(request: Dict) -> Dict: custom_order", "\"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\", \"validate_script\", ] return sort_dict_by_custom_order(step, custom_order) def", "Any]: if isinstance(raw_variables, Dict): return raw_variables if isinstance(raw_variables, List): #", "\"teststeps\": [teststep], } def ensure_testcase_v3(test_content: Dict) -> Dict: logger.info(\"ensure compatibility", "not in test_content: logger.error(f\"Miss teststeps: {test_content}\") 
sys.exit(1) if not isinstance(test_content[\"teststeps\"],", "compatibility with testcase format v2\") v3_content = {\"config\": test_content[\"config\"], \"teststeps\":", "{\"varA\": \"body.varA\", \"varB\": \"body.varB\"} \"\"\" v3_extractors: Dict = {} if", "\"varB\": \"body.varB\"} v3_extractors = extractors else: logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1)", "= step[\"export\"] if \"validate\" in step: if not isinstance(step[\"validate\"], List):", "os.makedirs(dir_path) with open(conftest_path, \"w\", encoding=\"utf-8\") as f: f.write(conftest_content) logger.info(\"generated conftest.py", "raw = f\"body{raw[len('json'):]}\" raw_list = [] for item in raw.split(\".\"):", "v2\") v3_content = {\"config\": test_content[\"config\"], \"teststeps\": []} if \"teststeps\" not", "f: json.dump(summary, f, indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task summary: {summary_path}\")", "\"body.varA\", \"varB\": \"body.varB\"} v3_extractors = extractors else: logger.error(f\"Invalid extractor: {extractors}\")", "= os.path.join(logs_dir_path, test_path_relative_path) dump_file_name = \"all.summary.json\" else: file_relative_folder_path, test_file =", "= step[\"setup_hooks\"] if \"teardown_hooks\" in step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if", "with open(summary_path, \"w\", encoding=\"utf-8\") as f: json.dump(summary, f, indent=4, ensure_ascii=False,", "= conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path ) dir_path = os.path.dirname(conftest_path) if not", "if isinstance(raw_variables, List): # [{\"var1\": 1}, {\"var2\": 2}] variables: Dict[Text,", "# [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] for extractor in extractors: if", "List) -> List: \"\"\" ensure compatibility with deprecated cli args", "[{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] Returns: {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} \"\"\"", "def _convert_validators(validators: 
List) -> List: for v in validators: if", "v in extractor.items(): v3_extractors[k] = v elif isinstance(extractors, Dict): #", "return variables elif isinstance(raw_variables, Text): # get variables by function,", "\"\"\" ensure compatibility with deprecated cli args in v2 \"\"\"", "step: test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if \"teardown_hooks\" in step: test_dict[\"teardown_hooks\"] =", "config[\"export\"] = extract_variable_names return { \"config\": config, \"teststeps\": [teststep], }", "+= 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas) - 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"]", "def _generate_conftest_for_summary(args: List): for arg in args: if os.path.exists(arg): test_path", "testcase_summary_json = testcase_summary.dict() testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\"", "os.path.join(logs_dir_path, test_path_relative_path) dump_file_name = \"all.summary.json\" else: file_relative_folder_path, test_file = os.path.split(test_path_relative_path)", "extract list(v2) to dict(v3) Args: extractors: [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}]", "= item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit(): # convert lst.0.name to lst[0].name", "in args: logger.warning( f\"generate conftest.py keep compatibility with --save-tests in", "= os.path.dirname(conftest_path) if not os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path, \"w\", encoding=\"utf-8\")", "variables elif isinstance(raw_variables, Text): # get variables by function, e.g.", "!= 1: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) variables.update(var_item)", "raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\") # content.xx/json.xx => body.xx if", "= os.path.join(project_root_dir, \"conftest.py\") 
test_path = os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir, \"logs\")", "step[\"teardown_hooks\"] if \"extract\" in step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if \"export\"", "parse_data(raw_variables, {}, project_meta.functions) return variables else: raise exceptions.TestCaseFormatError( f\"Invalid variables", "= \"all.summary.json\" else: file_relative_folder_path, test_file = os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path,", "summary_path = os.path.join(file_foder_path, dump_file_name) conftest_content = conftest_content.replace( \"{{SUMMARY_PATH_PLACEHOLDER}}\", summary_path )", "\"/\" in path: path = os.sep.join(path.split(\"/\")) if \"\\\\\" in path:", "got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1) for step in test_content[\"teststeps\"]: teststep", "else: raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep)", "def _sort_step_by_custom_order(step: Dict) -> Dict: custom_order = [ \"name\", \"variables\",", "time import pytest from loguru import logger from httprunner.utils import", "== 0: logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1) last_item = raw_list.pop() item", "raw_list.append(item) return \".\".join(raw_list) def _convert_extractors(extractors: Union[List, Dict]) -> Dict: \"\"\"", "# format1: {\"check\": \"content.abc\", \"assert\": \"eq\", \"expect\": 201} v[\"check\"] =", "for item in raw.split(\".\"): if \"-\" in item: # add", "= _convert_jmespath(v[\"check\"]) elif len(v) == 1: # format2: {'eq': ['status_code',", "test_path = arg # FIXME: several test paths maybe specified", "\"config\": config, \"teststeps\": [teststep], } def ensure_testcase_v3(test_content: Dict) -> Dict:", "_convert_jmespath(v[\"check\"]) elif len(v) == 1: # format2: {'eq': ['status_code', 
201]}", "\"fail\": 0}, \"teststeps\": {\"total\": 0, \"failures\": 0, \"successes\": 0}, },", "step[\"variables\"] if \"setup_hooks\" in step: test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if \"teardown_hooks\"", "teststep[\"testcase\"] = step.pop(\"api\") elif \"testcase\" in step: teststep[\"testcase\"] = step.pop(\"testcase\")", "teststep = { \"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep)", "--failfast if \"--failfast\" in args: logger.warning(f\"remove deprecated argument: --failfast\") args.pop(args.index(\"--failfast\"))", "{} for var_item in raw_variables: if not isinstance(var_item, Dict) or", "\"request\", \"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\", \"validate_script\", ] return sort_dict_by_custom_order(step,", "raw_list.append(item) else: raw_list.append(item) return \".\".join(raw_list) def _convert_extractors(extractors: Union[List, Dict]) ->", "not isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\") # content.xx/json.xx", "# [{\"var1\": 1}, {\"var2\": 2}] variables: Dict[Text, Any] = {}", "[]} if \"teststeps\" not in test_content: logger.error(f\"Miss teststeps: {test_content}\") sys.exit(1)", "f\"Invalid variables format: {raw_variables}\" ) def _convert_jmespath(raw: Text) -> Text:", "step: test_dict[\"export\"] = step[\"export\"] if \"validate\" in step: if not", "remove deprecated --failfast if \"--failfast\" in args: logger.warning(f\"remove deprecated argument:", "json import os import time import pytest from loguru import", "httprunner.loader import load_project_meta, convert_relative_project_root_dir from httprunner.parser import parse_data from httprunner.utils", "= item.instance.get_summary() summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"] 
+=", "yield logger.info(f\"task finished, generate task summary for --save-tests\") summary =", "testcase format v2\") v3_content = {\"config\": test_content[\"config\"], \"teststeps\": []} if", "keep compatibility with --save-tests in v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return", "test_content: logger.error(f\"Miss teststeps: {test_content}\") sys.exit(1) if not isinstance(test_content[\"teststeps\"], list): logger.error(", "else: logger.error(f\"No valid test path specified! \\nargs: {args}\") sys.exit(1) conftest_content", "= project_meta.RootDir conftest_path = os.path.join(project_root_dir, \"conftest.py\") test_path = os.path.abspath(test_path) logs_dir_path", "dir_path = os.path.dirname(conftest_path) if not os.path.exists(dir_path): os.makedirs(dir_path) with open(conftest_path, \"w\",", "conftest.py to generate summary.json\") def ensure_path_sep(path: Text) -> Text: \"\"\"", "Dict[Text, Any] = {} for var_item in raw_variables: if not", "{step}\") teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content def ensure_cli_args(args:", "extractors else: logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v in", "should be list type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1) for", "List): for arg in args: if os.path.exists(arg): test_path = arg", "Dict = {} if isinstance(extractors, List): # [{\"varA\": \"content.varA\"}, {\"varB\":", "= time.time() yield logger.info(f\"task finished, generate task summary for --save-tests\")", "\"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path, test_path_relative_path)", "logger.info(f\"generated task summary: {summary_path}\") ''' project_meta = load_project_meta(test_path) project_root_dir =", "summary for 
--save-tests\") summary = { \"success\": True, \"stat\": {", "# e.g. headers.Content-Type => headers.\"Content-Type\" item = item.strip('\"') raw_list.append(f'\"{item}\"') elif", "exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\") # content.xx/json.xx => body.xx if raw.startswith(\"content\"):", "import os import time import pytest from loguru import logger", "f\"generate conftest.py keep compatibility with --save-tests in v2\" ) args.pop(args.index(\"--save-tests\"))", "= os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\" summary_path = os.path.join(file_foder_path, dump_file_name) conftest_content", "--failfast\") args.pop(args.index(\"--failfast\")) # convert --report-file to --html if \"--report-file\" in", "and Windows \"\"\" if \"/\" in path: path = os.sep.join(path.split(\"/\"))", "variables format: {raw_variables}\" ) def _convert_jmespath(raw: Text) -> Text: if", "f'Invalid teststep validate: {step[\"validate\"]}' ) test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if \"validate_script\"", "if \"extract\" in step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if \"export\" in", "if isinstance(extractors, List): # [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] for extractor", "summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] +=", "logger.error( f'teststeps should be list type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' )", "step: teststep[\"testcase\"] = step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step))", "os import time import pytest from loguru import logger from", "\"cookies\", \"data\", \"json\", \"files\", \"timeout\", \"allow_redirects\", \"proxies\", \"verify\", \"stream\", 
\"auth\",", "elif isinstance(extractors, Dict): # {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} v3_extractors =", "teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in step: teststep[\"testcase\"] = step.pop(\"api\")", "] return sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step: Dict) -> Dict: custom_order", "httprunner.parser import parse_data from httprunner.utils import sort_dict_by_custom_order def convert_variables( raw_variables:", "step: if not isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError( f'Invalid teststep validate:", "e.g. headers.Content-Type => headers.\"Content-Type\" item = item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit():", "import json import os import time import pytest from loguru", "1 summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas) - 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] +=", "item = f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item) return \".\".join(raw_list) def _convert_extractors(extractors:", "v in validators: if \"check\" in v and \"expect\" in", "{raw_variables}\" ) variables.update(var_item) return variables elif isinstance(raw_variables, Text): # get", "=> headers.\"Content-Type\" item = item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit(): # convert", "List): raise exceptions.TestCaseFormatError( f'Invalid teststep validate: {step[\"validate\"]}' ) test_dict[\"validate\"] =", "0, \"successes\": 0}, }, \"time\": {\"start_at\": start_at, \"duration\": time.time() -", "def _convert_extractors(extractors: Union[List, Dict]) -> Dict: \"\"\" convert extract list(v2)", "step[\"export\"] if \"validate\" in step: if not isinstance(step[\"validate\"], List): raise", "0, \"success\": 0, \"fail\": 0}, \"teststeps\": {\"total\": 0, \"failures\": 0,", "var_item in raw_variables: if not isinstance(var_item, Dict) or len(var_item) !=", 
"for var_item in raw_variables: if not isinstance(var_item, Dict) or len(var_item)", "return args def _generate_conftest_for_summary(args: List): for arg in args: if", "each task\"\"\" logger.info(f\"start running testcases ...\") start_at = time.time() yield", "f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item) return \".\".join(raw_list) def _convert_extractors(extractors: Union[List, Dict])", "--html if \"--report-file\" in args: logger.warning(f\"replace deprecated argument --report-file with", "logger from httprunner.utils import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request):", "\"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep) config = {\"name\":", "testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] += 1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"]", "return v3_extractors def _convert_validators(validators: List) -> List: for v in", "( len(testcase_summary.step_datas) - 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json =", "summary[\"details\"].append(testcase_summary_json) summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with", "load_project_meta(test_path) project_root_dir = project_meta.RootDir conftest_path = os.path.join(project_root_dir, \"conftest.py\") test_path =", "\"varB\": \"body.varB\"} \"\"\" v3_extractors: Dict = {} if isinstance(extractors, List):", "= r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with open(summary_path, \"w\",", "_ = os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\" summary_path = 
os.path.join(file_foder_path, dump_file_name)", "quotes for field with separator # e.g. headers.Content-Type => headers.\"Content-Type\"", "with deprecated cli args in v2 \"\"\" # remove deprecated", "-> Dict: \"\"\" convert extract list(v2) to dict(v3) Args: extractors:", "0, \"fail\": 0}, \"teststeps\": {\"total\": 0, \"failures\": 0, \"successes\": 0},", "compatibility with --save-tests in v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args", "from httprunner.utils import sort_dict_by_custom_order def convert_variables( raw_variables: Union[Dict, List, Text],", "= _convert_jmespath(v) return v3_extractors def _convert_validators(validators: List) -> List: for", "be list type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1) for step", "if \"--save-tests\" in args: logger.warning( f\"generate conftest.py keep compatibility with", "\"extract\" in step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if \"export\" in step:", "Dict): # {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} v3_extractors = extractors else:", "args in v2 \"\"\" # remove deprecated --failfast if \"--failfast\"", "isinstance(raw, Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\") # content.xx/json.xx =>", "{extractors}\") sys.exit(1) for k, v in extractor.items(): v3_extractors[k] = v", "with testcase format v2\") v3_content = {\"config\": test_content[\"config\"], \"teststeps\": []}", "in v2\" ) args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args def _generate_conftest_for_summary(args: List):", "raw.split(\".\"): if \"-\" in item: # add quotes for field", "\"data\", \"json\", \"files\", \"timeout\", \"allow_redirects\", \"proxies\", \"verify\", \"stream\", \"auth\", \"cert\",", "arg # FIXME: several test paths maybe specified break else:", "sys.exit(1) conftest_content = '''# NOTICE: Generated By HttpRunner. 
import json", "1 summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"]", "lst[0].name if len(raw_list) == 0: logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1) last_item", "_convert_jmespath(v) return v3_extractors def _convert_validators(validators: List) -> List: for v", "testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas)", "in v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v) return v3_extractors def _convert_validators(validators: List)", "len(v) == 1: # format2: {'eq': ['status_code', 201]} comparator =", "-> Dict[Text, Any]: if isinstance(raw_variables, Dict): return raw_variables if isinstance(raw_variables,", "to dict(v3) Args: extractors: [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] Returns: {\"varA\":", "# convert --report-file to --html if \"--report-file\" in args: logger.warning(f\"replace", "raw_variables if isinstance(raw_variables, List): # [{\"var1\": 1}, {\"var2\": 2}] variables:", "item = item.strip('\"') raw_list.append(f'\"{item}\"') elif item.isdigit(): # convert lst.0.name to", "Dict) or len(var_item) != 1: raise exceptions.TestCaseFormatError( f\"Invalid variables format:", "# get variables by function, e.g. 
${get_variables()} project_meta = load_project_meta(test_path)", "201} v[\"check\"] = _convert_jmespath(v[\"check\"]) elif len(v) == 1: # format2:", "_sort_step_by_custom_order(teststep) config = {\"name\": api_content[\"name\"]} extract_variable_names: List = list(teststep.get(\"extract\", {}).keys())", "test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if \"export\" in step: test_dict[\"export\"] = step[\"export\"]", "\"platform\": get_platform(), \"details\": [], } for item in request.node.items: testcase_summary", "if raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw = f\"body{raw[len('json'):]}\"", "for item in request.node.items: testcase_summary = item.instance.get_summary() summary[\"success\"] &= testcase_summary.success", "in validators: if \"check\" in v and \"expect\" in v:", "logger.warning(f\"replace deprecated argument --report-file with --html\") index = args.index(\"--report-file\") args[index]", "FIXME: several test paths maybe specified break else: logger.error(f\"No valid", "= {\"name\": api_content[\"name\"]} extract_variable_names: List = list(teststep.get(\"extract\", {}).keys()) if extract_variable_names:", "httprunner.utils import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request): \"\"\"setup and", "teardown each task\"\"\" logger.info(f\"start running testcases ...\") start_at = time.time()", "\"w\", encoding=\"utf-8\") as f: json.dump(summary, f, indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated", "api_content[\"name\"]} extract_variable_names: List = list(teststep.get(\"extract\", {}).keys()) if extract_variable_names: config[\"export\"] =", "_convert_extractors(step[\"extract\"]) if \"export\" in step: test_dict[\"export\"] = step[\"export\"] if \"validate\"", ") summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json = testcase_summary.dict() 
testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\")", "List): # [{\"var1\": 1}, {\"var2\": 2}] variables: Dict[Text, Any] =", "os.path.join(project_root_dir, \"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path) if os.path.isdir(test_path): file_foder_path = os.path.join(logs_dir_path,", "if extract_variable_names: config[\"export\"] = extract_variable_names return { \"config\": config, \"teststeps\":", "\"request\" in step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in step:", "HttpRunner. import json import os import time import pytest from", "sort_dict_by_custom_order def convert_variables( raw_variables: Union[Dict, List, Text], test_path: Text )", "List: \"\"\" ensure compatibility with deprecated cli args in v2", "= parse_data(raw_variables, {}, project_meta.functions) return variables else: raise exceptions.TestCaseFormatError( f\"Invalid", "summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += len( testcase_summary.step_datas ) else: summary[\"stat\"][\"testcases\"][\"fail\"]", "test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if \"extract\" in step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"])", "v in v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v) return v3_extractors def _convert_validators(validators:", "List: for v in validators: if \"check\" in v and", "paths maybe specified break else: logger.error(f\"No valid test path specified!", "from httprunner.utils import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request): \"\"\"setup", "pytest from loguru import logger from httprunner.utils import get_platform, ExtendJSONEncoder", "import pytest from loguru import logger from httprunner.utils import get_platform,", "= {\"config\": test_content[\"config\"], \"teststeps\": []} if \"teststeps\" not in test_content:", 
"v3_extractors = extractors else: logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k,", "\"time\": {\"start_at\": start_at, \"duration\": time.time() - start_at}, \"platform\": get_platform(), \"details\":", "\"assert\": \"eq\", \"expect\": 201} v[\"check\"] = _convert_jmespath(v[\"check\"]) elif len(v) ==", "1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json = testcase_summary.dict() testcase_summary_json[\"records\"] =", "list type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1) for step in", "Text) -> Text: \"\"\" ensure compatibility with different path separators", "in v and \"expect\" in v: # format1: {\"check\": \"content.abc\",", "not isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps should be list type, got", "test path specified! \\nargs: {args}\") sys.exit(1) conftest_content = '''# NOTICE:", "else: summary[\"stat\"][\"testcases\"][\"fail\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas) - 1", "with --html\") index = args.index(\"--report-file\") args[index] = \"--html\" args.append(\"--self-contained-html\") #", "f'teststeps should be list type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1)", "{\"varB\": \"json.varB\"}] for extractor in extractors: if not isinstance(extractor, Dict):", "import time import pytest from loguru import logger from httprunner.utils", "0, \"failures\": 0, \"successes\": 0}, }, \"time\": {\"start_at\": start_at, \"duration\":", "f, indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task summary: {summary_path}\") ''' project_meta", "as f: f.write(conftest_content) logger.info(\"generated conftest.py to generate summary.json\") def ensure_path_sep(path:", "Text], test_path: Text ) -> Dict[Text, Any]: if isinstance(raw_variables, Dict):", "test_dict[\"variables\"] = step[\"variables\"] if \"setup_hooks\" in step: 
test_dict[\"setup_hooks\"] = step[\"setup_hooks\"]", "+= 1 testcase_summary_json = testcase_summary.dict() testcase_summary_json[\"records\"] = testcase_summary_json.pop(\"step_datas\") summary[\"details\"].append(testcase_summary_json) summary_path", "\"expect\" in v: # format1: {\"check\": \"content.abc\", \"assert\": \"eq\", \"expect\":", "之间测试用例兼容性问题 \"\"\" import os import sys from typing import List,", "if \"--failfast\" in args: logger.warning(f\"remove deprecated argument: --failfast\") args.pop(args.index(\"--failfast\")) #", "\"eq\", \"expect\": 201} v[\"check\"] = _convert_jmespath(v[\"check\"]) elif len(v) == 1:", "os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _ = os.path.splitext(test_file) dump_file_name", "True, \"stat\": { \"testcases\": {\"total\": 0, \"success\": 0, \"fail\": 0},", "\"export\" in step: test_dict[\"export\"] = step[\"export\"] if \"validate\" in step:", "convert lst.0.name to lst[0].name if len(raw_list) == 0: logger.error(f\"Invalid jmespath:", "\"validate_script\" in step: test_dict[\"validate_script\"] = step[\"validate_script\"] return test_dict def ensure_testcase_v3_api(api_content:", "path separators of Linux and Windows \"\"\" if \"/\" in", "return \".\".join(raw_list) def _convert_extractors(extractors: Union[List, Dict]) -> Dict: \"\"\" convert", "v elif isinstance(extractors, Dict): # {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} v3_extractors", "else: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) def _convert_jmespath(raw:", "def _ensure_step_attachment(step: Dict) -> Dict: test_dict = { \"name\": step[\"name\"],", "in v2 to testcase format v3\") teststep = { \"request\":", "{extractors}\") sys.exit(1) for k, v in v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v)", "raise exceptions.TestCaseFormatError( f'Invalid teststep validate: {step[\"validate\"]}' ) test_dict[\"validate\"] = 
_convert_validators(step[\"validate\"])", "if \"request\" in step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in", "get variables by function, e.g. ${get_variables()} project_meta = load_project_meta(test_path) variables", "generate task summary for --save-tests\") summary = { \"success\": True,", "item.isdigit(): # convert lst.0.name to lst[0].name if len(raw_list) == 0:", "v3. 解决httprunner2 和 3 之间测试用例兼容性问题 \"\"\" import os import sys", "convert --report-file to --html if \"--report-file\" in args: logger.warning(f\"replace deprecated", "get_platform(), \"details\": [], } for item in request.node.items: testcase_summary =", "isinstance(extractors, List): # [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] for extractor in", "List, Text], test_path: Text ) -> Dict[Text, Any]: if isinstance(raw_variables,", "testcase format v3\") teststep = { \"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content))", "of Linux and Windows \"\"\" if \"/\" in path: path", "Text ) -> Dict[Text, Any]: if isinstance(raw_variables, Dict): return raw_variables", "function, e.g. 
${get_variables()} project_meta = load_project_meta(test_path) variables = parse_data(raw_variables, {},", "List) -> List: for v in validators: if \"check\" in", "list(teststep.get(\"extract\", {}).keys()) if extract_variable_names: config[\"export\"] = extract_variable_names return { \"config\":", "summary[\"stat\"][\"teststeps\"][\"successes\"] += ( len(testcase_summary.step_datas) - 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] += 1", "= step.pop(\"api\") elif \"testcase\" in step: teststep[\"testcase\"] = step.pop(\"testcase\") else:", "test_file = os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) test_file_name, _ =", "\"content.abc\", \"assert\": \"eq\", \"expect\": 201} v[\"check\"] = _convert_jmespath(v[\"check\"]) elif len(v)", "_sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in step: teststep[\"testcase\"] = step.pop(\"api\") elif \"testcase\"", "conftest_path = os.path.join(project_root_dir, \"conftest.py\") test_path = os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir,", "project_meta.functions) return variables else: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\"", "import sort_dict_by_custom_order def convert_variables( raw_variables: Union[Dict, List, Text], test_path: Text", "for arg in args: if os.path.exists(arg): test_path = arg #", "import logger from httprunner import exceptions from httprunner.loader import load_project_meta,", "{\"name\": api_content[\"name\"]} extract_variable_names: List = list(teststep.get(\"extract\", {}).keys()) if extract_variable_names: config[\"export\"]", "sys from typing import List, Dict, Text, Union, Any from", "--report-file with --html\") index = args.index(\"--report-file\") args[index] = \"--html\" args.append(\"--self-contained-html\")", "lst.0.name to lst[0].name if len(raw_list) == 0: logger.error(f\"Invalid jmespath: {raw}\")", "maybe specified break else: 
logger.error(f\"No valid test path specified! \\nargs:", "argument --report-file with --html\") index = args.index(\"--report-file\") args[index] = \"--html\"", "# keep compatibility with --save-tests in v2 if \"--save-tests\" in", "import exceptions from httprunner.loader import load_project_meta, convert_relative_project_root_dir from httprunner.parser import", "logger.info(\"convert api in v2 to testcase format v3\") teststep =", "import parse_data from httprunner.utils import sort_dict_by_custom_order def convert_variables( raw_variables: Union[Dict,", "v3\") teststep = { \"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep =", "解决httprunner2 和 3 之间测试用例兼容性问题 \"\"\" import os import sys from", "[teststep], } def ensure_testcase_v3(test_content: Dict) -> Dict: logger.info(\"ensure compatibility with", "for extractor in extractors: if not isinstance(extractor, Dict): logger.error(f\"Invalid extractor:", "in step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if \"extract\" in step: test_dict[\"extract\"]", "in v2 if \"--save-tests\" in args: logger.warning( f\"generate conftest.py keep", "summary_path = r\"{{SUMMARY_PATH_PLACEHOLDER}}\" summary_dir = os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with open(summary_path,", "if \"validate\" in step: if not isinstance(step[\"validate\"], List): raise exceptions.TestCaseFormatError(", "elif isinstance(raw_variables, Text): # get variables by function, e.g. ${get_variables()}", "v2 \"\"\" # remove deprecated --failfast if \"--failfast\" in args:", "+= ( len(testcase_summary.step_datas) - 1 ) summary[\"stat\"][\"teststeps\"][\"failures\"] += 1 testcase_summary_json", "v2 and v3. 
解决httprunner2 和 3 之间测试用例兼容性问题 \"\"\" import os", "} teststep.update(_ensure_step_attachment(api_content)) teststep = _sort_step_by_custom_order(teststep) config = {\"name\": api_content[\"name\"]} extract_variable_names:", "keep compatibility with --save-tests in v2 if \"--save-tests\" in args:", "return raw_variables if isinstance(raw_variables, List): # [{\"var1\": 1}, {\"var2\": 2}]", "custom_order) def _ensure_step_attachment(step: Dict) -> Dict: test_dict = { \"name\":", "= _convert_extractors(step[\"extract\"]) if \"export\" in step: test_dict[\"export\"] = step[\"export\"] if", "convert_variables( raw_variables: Union[Dict, List, Text], test_path: Text ) -> Dict[Text,", "Any] = {} for var_item in raw_variables: if not isinstance(var_item,", "args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args def _generate_conftest_for_summary(args: List): for arg in", "item in raw.split(\".\"): if \"-\" in item: # add quotes", "step: test_dict[\"teardown_hooks\"] = step[\"teardown_hooks\"] if \"extract\" in step: test_dict[\"extract\"] =", "open(conftest_path, \"w\", encoding=\"utf-8\") as f: f.write(conftest_content) logger.info(\"generated conftest.py to generate", "os.path.splitext(test_file) dump_file_name = f\"{test_file_name}.summary.json\" summary_path = os.path.join(file_foder_path, dump_file_name) conftest_content =", "list(v2) to dict(v3) Args: extractors: [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] Returns:", "{} if isinstance(extractors, List): # [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] for", "extractor in extractors: if not isinstance(extractor, Dict): logger.error(f\"Invalid extractor: {extractors}\")", "_sort_request_by_custom_order(request: Dict) -> Dict: custom_order = [ \"method\", \"url\", \"params\",", "= list(teststep.get(\"extract\", {}).keys()) if extract_variable_names: config[\"export\"] = extract_variable_names return {", "\"--save-tests\" in args: logger.warning( f\"generate 
conftest.py keep compatibility with --save-tests", "logger.warning( f\"generate conftest.py keep compatibility with --save-tests in v2\" )", "autouse=True) def session_fixture(request): \"\"\"setup and teardown each task\"\"\" logger.info(f\"start running", "\"proxies\", \"verify\", \"stream\", \"auth\", \"cert\", ] return sort_dict_by_custom_order(request, custom_order) def", "Dict: custom_order = [ \"method\", \"url\", \"params\", \"headers\", \"cookies\", \"data\",", "to testcase format v3\") teststep = { \"request\": _sort_request_by_custom_order(api_content[\"request\"]), }", "= os.path.dirname(summary_path) os.makedirs(summary_dir, exist_ok=True) with open(summary_path, \"w\", encoding=\"utf-8\") as f:", "teststep.update(_ensure_step_attachment(step)) teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content def ensure_cli_args(args: List)", "to generate summary.json\") def ensure_path_sep(path: Text) -> Text: \"\"\" ensure", "return variables else: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" )", "in step: teststep[\"testcase\"] = step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\")", ") args.pop(args.index(\"--save-tests\")) _generate_conftest_for_summary(args) return args def _generate_conftest_for_summary(args: List): for arg", "logger.error(f\"Miss teststeps: {test_content}\") sys.exit(1) if not isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps", "summary[\"stat\"][\"teststeps\"][\"total\"] += len(testcase_summary.step_datas) if testcase_summary.success: summary[\"stat\"][\"testcases\"][\"success\"] += 1 summary[\"stat\"][\"teststeps\"][\"successes\"] +=", "2}] variables: Dict[Text, Any] = {} for var_item in raw_variables:", "v3_extractors[k] = v elif isinstance(extractors, Dict): # {\"varA\": \"body.varA\", \"varB\":", "from httprunner.loader import load_project_meta, 
convert_relative_project_root_dir from httprunner.parser import parse_data from", "logger.info(f\"start running testcases ...\") start_at = time.time() yield logger.info(f\"task finished,", "variables = parse_data(raw_variables, {}, project_meta.functions) return variables else: raise exceptions.TestCaseFormatError(", "teststeps: {test_content}\") sys.exit(1) if not isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps should", "variables else: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) def", "Dict): logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v in extractor.items():", "\"variables\" in step: test_dict[\"variables\"] = step[\"variables\"] if \"setup_hooks\" in step:", "time.time() - start_at}, \"platform\": get_platform(), \"details\": [], } for item", "raw_list.pop() item = f\"{last_item}[{item}]\" raw_list.append(item) else: raw_list.append(item) return \".\".join(raw_list) def", "和 3 之间测试用例兼容性问题 \"\"\" import os import sys from typing", ") sys.exit(1) for step in test_content[\"teststeps\"]: teststep = {} if", "Generated By HttpRunner. 
import json import os import time import", "ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def session_fixture(request): \"\"\"setup and teardown each task\"\"\"", "step[\"validate_script\"] return test_dict def ensure_testcase_v3_api(api_content: Dict) -> Dict: logger.info(\"convert api", "Dict) -> Dict: custom_order = [ \"method\", \"url\", \"params\", \"headers\",", "variables.update(var_item) return variables elif isinstance(raw_variables, Text): # get variables by", "Text): raise exceptions.TestCaseFormatError(f\"Invalid jmespath extractor: {raw}\") # content.xx/json.xx => body.xx", "index = args.index(\"--report-file\") args[index] = \"--html\" args.append(\"--self-contained-html\") # keep compatibility", "Text: \"\"\" ensure compatibility with different path separators of Linux", "= { \"name\": step[\"name\"], } if \"variables\" in step: test_dict[\"variables\"]", "--report-file to --html if \"--report-file\" in args: logger.warning(f\"replace deprecated argument", "# content.xx/json.xx => body.xx if raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\" elif", "_convert_extractors(extractors: Union[List, Dict]) -> Dict: \"\"\" convert extract list(v2) to", "in v: # format1: {\"check\": \"content.abc\", \"assert\": \"eq\", \"expect\": 201}", "compatibility with different path separators of Linux and Windows \"\"\"", "ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task summary: {summary_path}\") ''' project_meta = load_project_meta(test_path)", "= arg # FIXME: several test paths maybe specified break", "httprunner import exceptions from httprunner.loader import load_project_meta, convert_relative_project_root_dir from httprunner.parser", "= step[\"validate_script\"] return test_dict def ensure_testcase_v3_api(api_content: Dict) -> Dict: logger.info(\"convert", "NOTICE: Generated By HttpRunner. 
import json import os import time", "item in request.node.items: testcase_summary = item.instance.get_summary() summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"]", "# {\"varA\": \"body.varA\", \"varB\": \"body.varB\"} v3_extractors = extractors else: logger.error(f\"Invalid", "from typing import List, Dict, Text, Union, Any from loguru", "\"\"\" if \"/\" in path: path = os.sep.join(path.split(\"/\")) if \"\\\\\"", "{}, project_meta.functions) return variables else: raise exceptions.TestCaseFormatError( f\"Invalid variables format:", "for k, v in extractor.items(): v3_extractors[k] = v elif isinstance(extractors,", "in raw.split(\".\"): if \"-\" in item: # add quotes for", "# add quotes for field with separator # e.g. headers.Content-Type", "for field with separator # e.g. headers.Content-Type => headers.\"Content-Type\" item", "if len(raw_list) == 0: logger.error(f\"Invalid jmespath: {raw}\") sys.exit(1) last_item =", "compatibility with --save-tests in v2 if \"--save-tests\" in args: logger.warning(", "\"setup_hooks\" in step: test_dict[\"setup_hooks\"] = step[\"setup_hooks\"] if \"teardown_hooks\" in step:", "jmespath extractor: {raw}\") # content.xx/json.xx => body.xx if raw.startswith(\"content\"): raw", "--save-tests\") summary = { \"success\": True, \"stat\": { \"testcases\": {\"total\":", "{step[\"validate\"]}' ) test_dict[\"validate\"] = _convert_validators(step[\"validate\"]) if \"validate_script\" in step: test_dict[\"validate_script\"]", "Dict) -> Dict: test_dict = { \"name\": step[\"name\"], } if", "\"name\", \"variables\", \"request\", \"testcase\", \"setup_hooks\", \"teardown_hooks\", \"extract\", \"validate\", \"validate_script\", ]", "logger.warning(f\"remove deprecated argument: --failfast\") args.pop(args.index(\"--failfast\")) # convert --report-file to --html", "from loguru import logger from httprunner.utils import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\",", "Windows \"\"\" if 
\"/\" in path: path = os.sep.join(path.split(\"/\")) if", "= os.sep.join(path.split(\"/\")) if \"\\\\\" in path: path = os.sep.join(path.split(\"\\\\\")) return", "format v3\") teststep = { \"request\": _sort_request_by_custom_order(api_content[\"request\"]), } teststep.update(_ensure_step_attachment(api_content)) teststep", "\"conftest.py\") test_path = os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir, \"logs\") test_path_relative_path =", "Union[List, Dict]) -> Dict: \"\"\" convert extract list(v2) to dict(v3)", "several test paths maybe specified break else: logger.error(f\"No valid test", "= f\"body{raw[len('json'):]}\" raw_list = [] for item in raw.split(\".\"): if", "isinstance(extractor, Dict): logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v in", "\"stream\", \"auth\", \"cert\", ] return sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step: Dict)", "Dict: \"\"\" convert extract list(v2) to dict(v3) Args: extractors: [{\"varA\":", "...\") start_at = time.time() yield logger.info(f\"task finished, generate task summary", "\"\"\" convert extract list(v2) to dict(v3) Args: extractors: [{\"varA\": \"content.varA\"},", "teststep = _sort_step_by_custom_order(teststep) v3_content[\"teststeps\"].append(teststep) return v3_content def ensure_cli_args(args: List) ->", "not isinstance(var_item, Dict) or len(var_item) != 1: raise exceptions.TestCaseFormatError( f\"Invalid", "step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if \"export\" in step: test_dict[\"export\"] =", "teststep[\"testcase\"] = step.pop(\"testcase\") else: raise exceptions.TestCaseFormatError(f\"Invalid teststep: {step}\") teststep.update(_ensure_step_attachment(step)) teststep", "isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps should be list type, got {type(test_content[\"teststeps\"])}:", "in request.node.items: testcase_summary = item.instance.get_summary() 
summary[\"success\"] &= testcase_summary.success summary[\"stat\"][\"testcases\"][\"total\"] +=", "else: logger.error(f\"Invalid extractor: {extractors}\") sys.exit(1) for k, v in v3_extractors.items():", "os.makedirs(summary_dir, exist_ok=True) with open(summary_path, \"w\", encoding=\"utf-8\") as f: json.dump(summary, f,", "loguru import logger from httprunner import exceptions from httprunner.loader import", "sort_dict_by_custom_order(request, custom_order) def _sort_step_by_custom_order(step: Dict) -> Dict: custom_order = [", "} if \"variables\" in step: test_dict[\"variables\"] = step[\"variables\"] if \"setup_hooks\"", "ensure compatibility with deprecated cli args in v2 \"\"\" #", "\"details\": [], } for item in request.node.items: testcase_summary = item.instance.get_summary()", "different path separators of Linux and Windows \"\"\" if \"/\"", "v3_content def ensure_cli_args(args: List) -> List: \"\"\" ensure compatibility with", ") def _convert_jmespath(raw: Text) -> Text: if not isinstance(raw, Text):", "{test_content}\") sys.exit(1) if not isinstance(test_content[\"teststeps\"], list): logger.error( f'teststeps should be", "logger.error(f\"No valid test path specified! 
\\nargs: {args}\") sys.exit(1) conftest_content =", "List): # [{\"varA\": \"content.varA\"}, {\"varB\": \"json.varB\"}] for extractor in extractors:", "import logger from httprunner.utils import get_platform, ExtendJSONEncoder @pytest.fixture(scope=\"session\", autouse=True) def", "f.write(conftest_content) logger.info(\"generated conftest.py to generate summary.json\") def ensure_path_sep(path: Text) ->", "type, got {type(test_content[\"teststeps\"])}: {test_content[\"teststeps\"]}' ) sys.exit(1) for step in test_content[\"teststeps\"]:", "=> body.xx if raw.startswith(\"content\"): raw = f\"body{raw[len('content'):]}\" elif raw.startswith(\"json\"): raw", "project_meta.RootDir conftest_path = os.path.join(project_root_dir, \"conftest.py\") test_path = os.path.abspath(test_path) logs_dir_path =", "0}, }, \"time\": {\"start_at\": start_at, \"duration\": time.time() - start_at}, \"platform\":", "= os.path.abspath(test_path) logs_dir_path = os.path.join(project_root_dir, \"logs\") test_path_relative_path = convert_relative_project_root_dir(test_path) if", "v3_content = {\"config\": test_content[\"config\"], \"teststeps\": []} if \"teststeps\" not in", "-> Text: \"\"\" ensure compatibility with different path separators of", "separator # e.g. headers.Content-Type => headers.\"Content-Type\" item = item.strip('\"') raw_list.append(f'\"{item}\"')", "_ensure_step_attachment(step: Dict) -> Dict: test_dict = { \"name\": step[\"name\"], }", "args: if os.path.exists(arg): test_path = arg # FIXME: several test", "1: raise exceptions.TestCaseFormatError( f\"Invalid variables format: {raw_variables}\" ) variables.update(var_item) return", "field with separator # e.g. 
headers.Content-Type => headers.\"Content-Type\" item =", "in step: teststep[\"request\"] = _sort_request_by_custom_order(step.pop(\"request\")) elif \"api\" in step: teststep[\"testcase\"]", "k, v in v3_extractors.items(): v3_extractors[k] = _convert_jmespath(v) return v3_extractors def", "else: file_relative_folder_path, test_file = os.path.split(test_path_relative_path) file_foder_path = os.path.join(logs_dir_path, file_relative_folder_path) test_file_name,", "ensure_path_sep(path: Text) -> Text: \"\"\" ensure compatibility with different path", "raw_variables: if not isinstance(var_item, Dict) or len(var_item) != 1: raise", "\"\"\" This module handles compatibility issues between testcase format v2", "json.dump(summary, f, indent=4, ensure_ascii=False, cls=ExtendJSONEncoder) logger.info(f\"generated task summary: {summary_path}\") '''", "format: {raw_variables}\" ) variables.update(var_item) return variables elif isinstance(raw_variables, Text): #", "\"check\" in v and \"expect\" in v: # format1: {\"check\":", "Dict: logger.info(\"ensure compatibility with testcase format v2\") v3_content = {\"config\":", "deprecated cli args in v2 \"\"\" # remove deprecated --failfast", "\"--html\" args.append(\"--self-contained-html\") # keep compatibility with --save-tests in v2 if", "\"json\", \"files\", \"timeout\", \"allow_redirects\", \"proxies\", \"verify\", \"stream\", \"auth\", \"cert\", ]", "session_fixture(request): \"\"\"setup and teardown each task\"\"\" logger.info(f\"start running testcases ...\")", "logger.info(f\"task finished, generate task summary for --save-tests\") summary = {", "start_at}, \"platform\": get_platform(), \"details\": [], } for item in request.node.items:", "= step[\"teardown_hooks\"] if \"extract\" in step: test_dict[\"extract\"] = _convert_extractors(step[\"extract\"]) if", "Text): # get variables by function, e.g. ${get_variables()} project_meta =" ]
[ "can click-drag a rectangular region to zoom. If you use", "scatter plot\" bg_color=\"lightgray\" #=============================================================================== # # Demo class that is", "bg_color=\"lightgray\" #=============================================================================== # # Demo class that is used by", "class that is used by the demo.py application. #=============================================================================== class", "= View( Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation = \"vertical\"),", "sequence of zoom boxes, pressing alt-left-arrow and alt-right-arrow moves you", "Chaco panning and zooming. Interacting with the plot: - Left-mouse-drag", "import HasTraits, Instance from traitsui.api import Item, Group, View #", "and zooming. Interacting with the plot: - Left-mouse-drag pans the", "\"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\") # Tweak some", "zoom = ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return plot #=============================================================================== #", "\"z\" brings up the Zoom Box, and you can click-drag", "Enthought library imports from enable.api import Component, ComponentEditor from traits.api", "HasTraits, Instance from traitsui.api import Item, Group, View # Chaco", "and give it this data pd = ArrayPlotData() pd.set_data(\"index\", x)", "the plot plot = Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\",", "import random # Enthought library imports from enable.api import Component,", "numpts = 5000 x = sort(random(numpts)) y = random(numpts) #", "PanTool, ZoomTool #=============================================================================== # # Create the Chaco plot. 
#===============================================================================", "y) # Create the plot plot = Plot(pd) plot.plot((\"index\", \"value\"),", "panning and zooming. Interacting with the plot: - Left-mouse-drag pans", "you forwards and backwards through the \"zoom history\". \"\"\" #", "constrain_key=\"shift\")) zoom = ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return plot #===============================================================================", "to use for the plot view. size = (650, 650)", "properties plot.title = \"Scatter Plot\" plot.line_width = 0.5 plot.padding =", "the \"zoom history\". \"\"\" # Major library imports from numpy", "obect and give it this data pd = ArrayPlotData() pd.set_data(\"index\",", "= \"Scatter Plot\" plot.line_width = 0.5 plot.padding = 50 #", "scatter plot of a set of random points, with basic", "random # Enthought library imports from enable.api import Component, ComponentEditor", "points, with basic Chaco panning and zooming. Interacting with the", "traitsui.api import Item, Group, View # Chaco imports from chaco.api", "x = sort(random(numpts)) y = random(numpts) # Create a plot", "y = random(numpts) # Create a plot data obect and", "import ArrayPlotData, Plot from chaco.tools.api import PanTool, ZoomTool #=============================================================================== #", "import sort from numpy.random import random # Enthought library imports", "Plot from chaco.tools.api import PanTool, ZoomTool #=============================================================================== # # Create", "some tools to the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom = ZoomTool(component=plot,", "5000 x = sort(random(numpts)) y = random(numpts) # Create a", "0.5 plot.padding = 50 # Attach some tools to the", "# Attributes to use for the plot view. size =", "out. 
- Pressing \"z\" brings up the Zoom Box, and", "Item, Group, View # Chaco imports from chaco.api import ArrayPlotData,", "\"Basic scatter plot\" bg_color=\"lightgray\" #=============================================================================== # # Demo class that", "chaco.api import ArrayPlotData, Plot from chaco.tools.api import PanTool, ZoomTool #===============================================================================", "chaco.tools.api import PanTool, ZoomTool #=============================================================================== # # Create the Chaco", "# # Demo class that is used by the demo.py", "data numpts = 5000 x = sort(random(numpts)) y = random(numpts)", "some of the plot properties plot.title = \"Scatter Plot\" plot.line_width", "some data numpts = 5000 x = sort(random(numpts)) y =", "Mouse wheel up and down zooms the plot in and", "plot = Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3,", "to the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom = ZoomTool(component=plot, tool_mode=\"box\", always_on=False)", "return plot #=============================================================================== # Attributes to use for the plot", "for the plot view. size = (650, 650) title =", "\"vertical\"), resizable=True, title=title ) def _plot_default(self): return _create_plot_component() demo =", "\"\"\" Scatter plot with panning and zooming Shows a scatter", "of zoom boxes, pressing alt-left-arrow and alt-right-arrow moves you forwards", "sort(random(numpts)) y = random(numpts) # Create a plot data obect", "that is used by the demo.py application. 
#=============================================================================== class Demo(HasTraits):", "orientation = \"vertical\"), resizable=True, title=title ) def _plot_default(self): return _create_plot_component()", "it this data pd = ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\", y)", "#=============================================================================== def _create_plot_component(): # Create some data numpts = 5000", "demo.py application. #=============================================================================== class Demo(HasTraits): plot = Instance(Component) traits_view =", "panning and zooming Shows a scatter plot of a set", "numpy import sort from numpy.random import random # Enthought library", "alt-right-arrow moves you forwards and backwards through the \"zoom history\".", "# Create the Chaco plot. #=============================================================================== def _create_plot_component(): # Create", "plot = Instance(Component) traits_view = View( Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color),", "random points, with basic Chaco panning and zooming. Interacting with", "= sort(random(numpts)) y = random(numpts) # Create a plot data", "import Item, Group, View # Chaco imports from chaco.api import", "ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return plot #=============================================================================== # Attributes to", "zooms the plot in and out. - Pressing \"z\" brings", "the plot in and out. - Pressing \"z\" brings up", "from chaco.tools.api import PanTool, ZoomTool #=============================================================================== # # Create the", "- Left-mouse-drag pans the plot. - Mouse wheel up and", "\"Scatter Plot\" plot.line_width = 0.5 plot.padding = 50 # Attach", "with basic Chaco panning and zooming. 
Interacting with the plot:", "View # Chaco imports from chaco.api import ArrayPlotData, Plot from", "Plot\" plot.line_width = 0.5 plot.padding = 50 # Attach some", "pd.set_data(\"value\", y) # Create the plot plot = Plot(pd) plot.plot((\"index\",", "plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom = ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return plot", "editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation = \"vertical\"), resizable=True, title=title ) def", "used by the demo.py application. #=============================================================================== class Demo(HasTraits): plot =", "def _create_plot_component(): # Create some data numpts = 5000 x", "with panning and zooming Shows a scatter plot of a", "brings up the Zoom Box, and you can click-drag a", "you can click-drag a rectangular region to zoom. If you", "click-drag a rectangular region to zoom. If you use a", "use a sequence of zoom boxes, pressing alt-left-arrow and alt-right-arrow", "pd.set_data(\"index\", x) pd.set_data(\"value\", y) # Create the plot plot =", "marker_size=3, bgcolor=\"white\") # Tweak some of the plot properties plot.title", "from chaco.api import ArrayPlotData, Plot from chaco.tools.api import PanTool, ZoomTool", "of the plot properties plot.title = \"Scatter Plot\" plot.line_width =", "index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\") # Tweak some of the plot", "Create the Chaco plot. #=============================================================================== def _create_plot_component(): # Create some", "#=============================================================================== # # Demo class that is used by the", "the demo.py application. 
#=============================================================================== class Demo(HasTraits): plot = Instance(Component) traits_view", "_plot_default(self): return _create_plot_component() demo = Demo() if __name__ == \"__main__\":", "- Pressing \"z\" brings up the Zoom Box, and you", "650) title = \"Basic scatter plot\" bg_color=\"lightgray\" #=============================================================================== # #", "tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return plot #=============================================================================== # Attributes to use", "# Create a plot data obect and give it this", "ZoomTool #=============================================================================== # # Create the Chaco plot. #=============================================================================== def", "Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation = \"vertical\"), resizable=True, title=title", "Major library imports from numpy import sort from numpy.random import", "a plot data obect and give it this data pd", "# Create the plot plot = Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\",", "basic Chaco panning and zooming. Interacting with the plot: -", "= 50 # Attach some tools to the plot plot.tools.append(PanTool(plot,", "show_label=False), orientation = \"vertical\"), resizable=True, title=title ) def _plot_default(self): return", "the plot. - Mouse wheel up and down zooms the", "Attach some tools to the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom =", "this data pd = ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\", y) #", "data obect and give it this data pd = ArrayPlotData()", "x) pd.set_data(\"value\", y) # Create the plot plot = Plot(pd)", "_create_plot_component() demo = Demo() if __name__ == \"__main__\": demo.configure_traits() #--EOF---", "backwards through the \"zoom history\". 
\"\"\" # Major library imports", "in and out. - Pressing \"z\" brings up the Zoom", "\"\"\" # Major library imports from numpy import sort from", "# Major library imports from numpy import sort from numpy.random", "the plot view. size = (650, 650) title = \"Basic", "to zoom. If you use a sequence of zoom boxes,", "view. size = (650, 650) title = \"Basic scatter plot\"", "plot. #=============================================================================== def _create_plot_component(): # Create some data numpts =", "Shows a scatter plot of a set of random points,", "forwards and backwards through the \"zoom history\". \"\"\" # Major", "# Enthought library imports from enable.api import Component, ComponentEditor from", "plot.line_width = 0.5 plot.padding = 50 # Attach some tools", "plot\" bg_color=\"lightgray\" #=============================================================================== # # Demo class that is used", "Create the plot plot = Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\",", "# Attach some tools to the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom", "Box, and you can click-drag a rectangular region to zoom.", "= 0.5 plot.padding = 50 # Attach some tools to", "_create_plot_component(): # Create some data numpts = 5000 x =", "and you can click-drag a rectangular region to zoom. If", "bgcolor=\"white\") # Tweak some of the plot properties plot.title =", "and alt-right-arrow moves you forwards and backwards through the \"zoom", "marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\") # Tweak some of the", "plot.title = \"Scatter Plot\" plot.line_width = 0.5 plot.padding = 50", "title=title ) def _plot_default(self): return _create_plot_component() demo = Demo() if", "Chaco plot. #=============================================================================== def _create_plot_component(): # Create some data numpts", "zooming. 
Interacting with the plot: - Left-mouse-drag pans the plot.", "plot properties plot.title = \"Scatter Plot\" plot.line_width = 0.5 plot.padding", "title = \"Basic scatter plot\" bg_color=\"lightgray\" #=============================================================================== # # Demo", "= ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\", y) # Create the plot", "#=============================================================================== class Demo(HasTraits): plot = Instance(Component) traits_view = View( Group(", "plot #=============================================================================== # Attributes to use for the plot view.", "ComponentEditor from traits.api import HasTraits, Instance from traitsui.api import Item,", "Create some data numpts = 5000 x = sort(random(numpts)) y", "up and down zooms the plot in and out. -", "from enable.api import Component, ComponentEditor from traits.api import HasTraits, Instance", "from numpy import sort from numpy.random import random # Enthought", "a set of random points, with basic Chaco panning and", "#=============================================================================== # Attributes to use for the plot view. size", "you use a sequence of zoom boxes, pressing alt-left-arrow and", "Demo(HasTraits): plot = Instance(Component) traits_view = View( Group( Item('plot', editor=ComponentEditor(size=size,", "If you use a sequence of zoom boxes, pressing alt-left-arrow", "Interacting with the plot: - Left-mouse-drag pans the plot. -", "plot in and out. - Pressing \"z\" brings up the", "of random points, with basic Chaco panning and zooming. Interacting", "plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom = ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return", "a rectangular region to zoom. 
If you use a sequence", "imports from chaco.api import ArrayPlotData, Plot from chaco.tools.api import PanTool,", "plot of a set of random points, with basic Chaco", "and down zooms the plot in and out. - Pressing", "50 # Attach some tools to the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\"))", "zooming Shows a scatter plot of a set of random", "the plot properties plot.title = \"Scatter Plot\" plot.line_width = 0.5", "Chaco imports from chaco.api import ArrayPlotData, Plot from chaco.tools.api import", "give it this data pd = ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\",", "tools to the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom = ZoomTool(component=plot, tool_mode=\"box\",", "numpy.random import random # Enthought library imports from enable.api import", "View( Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation = \"vertical\"), resizable=True,", "resizable=True, title=title ) def _plot_default(self): return _create_plot_component() demo = Demo()", "ArrayPlotData, Plot from chaco.tools.api import PanTool, ZoomTool #=============================================================================== # #", "alt-left-arrow and alt-right-arrow moves you forwards and backwards through the", "and out. - Pressing \"z\" brings up the Zoom Box,", "- Mouse wheel up and down zooms the plot in", "Left-mouse-drag pans the plot. - Mouse wheel up and down", "rectangular region to zoom. If you use a sequence of", "sort from numpy.random import random # Enthought library imports from", "library imports from enable.api import Component, ComponentEditor from traits.api import", "Zoom Box, and you can click-drag a rectangular region to", "type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\") # Tweak some of", "zoom. 
If you use a sequence of zoom boxes, pressing", "plot with panning and zooming Shows a scatter plot of", "from traits.api import HasTraits, Instance from traitsui.api import Item, Group,", "always_on=False) plot.overlays.append(zoom) return plot #=============================================================================== # Attributes to use for", "enable.api import Component, ComponentEditor from traits.api import HasTraits, Instance from", "up the Zoom Box, and you can click-drag a rectangular", "pd = ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\", y) # Create the", "= (650, 650) title = \"Basic scatter plot\" bg_color=\"lightgray\" #===============================================================================", "wheel up and down zooms the plot in and out.", "with the plot: - Left-mouse-drag pans the plot. - Mouse", "class Demo(HasTraits): plot = Instance(Component) traits_view = View( Group( Item('plot',", "the Chaco plot. #=============================================================================== def _create_plot_component(): # Create some data", "data pd = ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\", y) # Create", "import Component, ComponentEditor from traits.api import HasTraits, Instance from traitsui.api", "from numpy.random import random # Enthought library imports from enable.api", "the plot: - Left-mouse-drag pans the plot. - Mouse wheel", "plot data obect and give it this data pd =", "imports from enable.api import Component, ComponentEditor from traits.api import HasTraits,", "Demo class that is used by the demo.py application. 
#===============================================================================", "the Zoom Box, and you can click-drag a rectangular region", "a sequence of zoom boxes, pressing alt-left-arrow and alt-right-arrow moves", "set of random points, with basic Chaco panning and zooming.", "= 5000 x = sort(random(numpts)) y = random(numpts) # Create", ") def _plot_default(self): return _create_plot_component() demo = Demo() if __name__", "Component, ComponentEditor from traits.api import HasTraits, Instance from traitsui.api import", "pans the plot. - Mouse wheel up and down zooms", "zoom boxes, pressing alt-left-arrow and alt-right-arrow moves you forwards and", "and zooming Shows a scatter plot of a set of", "region to zoom. If you use a sequence of zoom", "and backwards through the \"zoom history\". \"\"\" # Major library", "return _create_plot_component() demo = Demo() if __name__ == \"__main__\": demo.configure_traits()", "color=\"orange\", marker_size=3, bgcolor=\"white\") # Tweak some of the plot properties", "history\". \"\"\" # Major library imports from numpy import sort", "plot plot = Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\",", "ArrayPlotData() pd.set_data(\"index\", x) pd.set_data(\"value\", y) # Create the plot plot", "#=============================================================================== # # Create the Chaco plot. 
#=============================================================================== def _create_plot_component():", "= Instance(Component) traits_view = View( Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False),", "= Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\")", "plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\") # Tweak", "= \"Basic scatter plot\" bg_color=\"lightgray\" #=============================================================================== # # Demo class", "Create a plot data obect and give it this data", "Pressing \"z\" brings up the Zoom Box, and you can", "= \"vertical\"), resizable=True, title=title ) def _plot_default(self): return _create_plot_component() demo", "import PanTool, ZoomTool #=============================================================================== # # Create the Chaco plot.", "plot. - Mouse wheel up and down zooms the plot", "Scatter plot with panning and zooming Shows a scatter plot", "plot.overlays.append(zoom) return plot #=============================================================================== # Attributes to use for the", "(650, 650) title = \"Basic scatter plot\" bg_color=\"lightgray\" #=============================================================================== #", "# Create some data numpts = 5000 x = sort(random(numpts))", "Tweak some of the plot properties plot.title = \"Scatter Plot\"", "Instance(Component) traits_view = View( Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation", "application. 
#=============================================================================== class Demo(HasTraits): plot = Instance(Component) traits_view = View(", "= random(numpts) # Create a plot data obect and give", "the plot plot.tools.append(PanTool(plot, constrain_key=\"shift\")) zoom = ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom)", "traits.api import HasTraits, Instance from traitsui.api import Item, Group, View", "Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation = \"vertical\"), resizable=True, title=title )", "plot view. size = (650, 650) title = \"Basic scatter", "Plot(pd) plot.plot((\"index\", \"value\"), type=\"scatter\", marker=\"circle\", index_sort=\"ascending\", color=\"orange\", marker_size=3, bgcolor=\"white\") #", "\"zoom history\". \"\"\" # Major library imports from numpy import", "# # Create the Chaco plot. #=============================================================================== def _create_plot_component(): #", "down zooms the plot in and out. - Pressing \"z\"", "# Chaco imports from chaco.api import ArrayPlotData, Plot from chaco.tools.api", "is used by the demo.py application. #=============================================================================== class Demo(HasTraits): plot", "moves you forwards and backwards through the \"zoom history\". \"\"\"", "plot.padding = 50 # Attach some tools to the plot", "size = (650, 650) title = \"Basic scatter plot\" bg_color=\"lightgray\"", "from traitsui.api import Item, Group, View # Chaco imports from", "# Tweak some of the plot properties plot.title = \"Scatter", "Attributes to use for the plot view. size = (650,", "boxes, pressing alt-left-arrow and alt-right-arrow moves you forwards and backwards", "def _plot_default(self): return _create_plot_component() demo = Demo() if __name__ ==", "Group, View # Chaco imports from chaco.api import ArrayPlotData, Plot", "use for the plot view. 
size = (650, 650) title", "traits_view = View( Group( Item('plot', editor=ComponentEditor(size=size, bgcolor=bg_color), show_label=False), orientation =", "plot: - Left-mouse-drag pans the plot. - Mouse wheel up", "by the demo.py application. #=============================================================================== class Demo(HasTraits): plot = Instance(Component)", "imports from numpy import sort from numpy.random import random #", "a scatter plot of a set of random points, with", "pressing alt-left-arrow and alt-right-arrow moves you forwards and backwards through", "Instance from traitsui.api import Item, Group, View # Chaco imports", "# Demo class that is used by the demo.py application.", "of a set of random points, with basic Chaco panning", "bgcolor=bg_color), show_label=False), orientation = \"vertical\"), resizable=True, title=title ) def _plot_default(self):", "through the \"zoom history\". \"\"\" # Major library imports from", "= ZoomTool(component=plot, tool_mode=\"box\", always_on=False) plot.overlays.append(zoom) return plot #=============================================================================== # Attributes", "library imports from numpy import sort from numpy.random import random", "random(numpts) # Create a plot data obect and give it" ]
[ "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "= 'ANY' SELENIUM_SERVER = None SELENIUM_PORT = 4444 BROWSER_WIDTH =", "# # Licensed under the Apache License, Version 2.0 (the", "configuration module of webstr selenium tests. This module provides configuration", "compliance with the License. # You may obtain a copy", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "4444 BROWSER_WIDTH = 1280 BROWSER_HEIGHT = 1024 def update_value(key_name, value,", "distributed under the License is distributed on an \"AS IS\"", "module provides configuration options along with default values and function", "module of webstr selenium tests. This module provides configuration options", "the specific language governing permissions and # limitations under the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. # You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "'' BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER = None SELENIUM_PORT = 4444", "of webstr selenium tests. 
This module provides configuration options along", "configuration options along with default values and function to redefine", "= logging.INFO SCHEME = 'https' PORT = 443 BROWSER =", "= 1280 BROWSER_HEIGHT = 1024 def update_value(key_name, value, force=False): \"\"\"", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER = None SELENIUM_PORT = 4444 BROWSER_WIDTH", "options along with default values and function to redefine values.", "Red Hat # # Licensed under the Apache License, Version", "1280 BROWSER_HEIGHT = 1024 def update_value(key_name, value, force=False): \"\"\" Update", "update_value(key_name, value, force=False): \"\"\" Update single value of this config", "= key_name.upper() # raise AttributeError if we try to define", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "443 BROWSER = 'Firefox' BROWSER_VERSION = '' BROWSER_PLATFORM = 'ANY'", "with default values and function to redefine values. \"\"\" #", "Hat # # Licensed under the Apache License, Version 2.0", "you may not use this file except in compliance with", "1024 def update_value(key_name, value, force=False): \"\"\" Update single value of", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "language governing permissions and # limitations under the License. import", "along with default values and function to redefine values. \"\"\"", "None SELENIUM_PORT = 4444 BROWSER_WIDTH = 1280 BROWSER_HEIGHT = 1024", "use this file except in compliance with the License. 
#", "def update_value(key_name, value, force=False): \"\"\" Update single value of this", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "'https' PORT = 443 BROWSER = 'Firefox' BROWSER_VERSION = ''", "= 'Firefox' BROWSER_VERSION = '' BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER =", "\"\"\" Update single value of this config module. \"\"\" this_module", "# raise AttributeError if we try to define new value", "key_name = key_name.upper() # raise AttributeError if we try to", "Update single value of this config module. \"\"\" this_module =", "CONDITIONS OF ANY KIND, either express or implied. # See", "SELENIUM_LOG_LEVEL = logging.INFO SCHEME = 'https' PORT = 443 BROWSER", "default values and function to redefine values. \"\"\" # Copyright", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "= '' BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER = None SELENIUM_PORT =", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "single value of this config module. \"\"\" this_module = sys.modules[__name__]", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "# Copyright 2016 Red Hat # # Licensed under the", "specific language governing permissions and # limitations under the License.", "# limitations under the License. import logging import sys SELENIUM_LOG_LEVEL", "this config module. 
\"\"\" this_module = sys.modules[__name__] key_name = key_name.upper()", "= sys.modules[__name__] key_name = key_name.upper() # raise AttributeError if we", "under the License is distributed on an \"AS IS\" BASIS,", "This module provides configuration options along with default values and", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "License for the specific language governing permissions and # limitations", "try to define new value (unless force is used) if", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "logging.INFO SCHEME = 'https' PORT = 443 BROWSER = 'Firefox'", "BROWSER_WIDTH = 1280 BROWSER_HEIGHT = 1024 def update_value(key_name, value, force=False):", "to define new value (unless force is used) if not", "the License for the specific language governing permissions and #", "(the \"License\"); # you may not use this file except", "= 'https' PORT = 443 BROWSER = 'Firefox' BROWSER_VERSION =", "redefine values. \"\"\" # Copyright 2016 Red Hat # #", "Apache License, Version 2.0 (the \"License\"); # you may not", "limitations under the License. import logging import sys SELENIUM_LOG_LEVEL =", "# you may not use this file except in compliance", "key_name.upper() # raise AttributeError if we try to define new", "either express or implied. # See the License for the", "selenium tests. This module provides configuration options along with default", "under the License. import logging import sys SELENIUM_LOG_LEVEL = logging.INFO", "OR CONDITIONS OF ANY KIND, either express or implied. #", "'Firefox' BROWSER_VERSION = '' BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER = None", "to redefine values. \"\"\" # Copyright 2016 Red Hat #", "License. 
import logging import sys SELENIUM_LOG_LEVEL = logging.INFO SCHEME =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "the License is distributed on an \"AS IS\" BASIS, #", "SELENIUM_SERVER = None SELENIUM_PORT = 4444 BROWSER_WIDTH = 1280 BROWSER_HEIGHT", "in compliance with the License. # You may obtain a", "sys SELENIUM_LOG_LEVEL = logging.INFO SCHEME = 'https' PORT = 443", "software # distributed under the License is distributed on an", "raise AttributeError if we try to define new value (unless", "\"\"\" # Copyright 2016 Red Hat # # Licensed under", "value, force=False): \"\"\" Update single value of this config module.", "governing permissions and # limitations under the License. import logging", "= 1024 def update_value(key_name, value, force=False): \"\"\" Update single value", "# # Unless required by applicable law or agreed to", "the License. import logging import sys SELENIUM_LOG_LEVEL = logging.INFO SCHEME", "\"\"\" Central configuration module of webstr selenium tests. This module", "and function to redefine values. \"\"\" # Copyright 2016 Red", "SCHEME = 'https' PORT = 443 BROWSER = 'Firefox' BROWSER_VERSION", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "permissions and # limitations under the License. import logging import", "Version 2.0 (the \"License\"); # you may not use this", "= 4444 BROWSER_WIDTH = 1280 BROWSER_HEIGHT = 1024 def update_value(key_name,", "law or agreed to in writing, software # distributed under", "this_module = sys.modules[__name__] key_name = key_name.upper() # raise AttributeError if", "= 443 BROWSER = 'Firefox' BROWSER_VERSION = '' BROWSER_PLATFORM =", "is used) if not force: getattr(this_module, key_name) setattr(this_module, key_name, value)", "Central configuration module of webstr selenium tests. This module provides", "and # limitations under the License. 
import logging import sys", "force is used) if not force: getattr(this_module, key_name) setattr(this_module, key_name,", "implied. # See the License for the specific language governing", "module. \"\"\" this_module = sys.modules[__name__] key_name = key_name.upper() # raise", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"License\"); # you may not use this file except in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "(unless force is used) if not force: getattr(this_module, key_name) setattr(this_module,", "we try to define new value (unless force is used)", "value (unless force is used) if not force: getattr(this_module, key_name)", "value of this config module. \"\"\" this_module = sys.modules[__name__] key_name", "tests. This module provides configuration options along with default values", "values. \"\"\" # Copyright 2016 Red Hat # # Licensed", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "logging import sys SELENIUM_LOG_LEVEL = logging.INFO SCHEME = 'https' PORT", "PORT = 443 BROWSER = 'Firefox' BROWSER_VERSION = '' BROWSER_PLATFORM", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "Copyright 2016 Red Hat # # Licensed under the Apache", "\"\"\" this_module = sys.modules[__name__] key_name = key_name.upper() # raise AttributeError", "2016 Red Hat # # Licensed under the Apache License,", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "sys.modules[__name__] key_name = key_name.upper() # raise AttributeError if we try", "values and function to redefine values. \"\"\" # Copyright 2016", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "BROWSER = 'Firefox' BROWSER_VERSION = '' BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER", "to in writing, software # distributed under the License is", "function to redefine values. \"\"\" # Copyright 2016 Red Hat", "of this config module. \"\"\" this_module = sys.modules[__name__] key_name =", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "AttributeError if we try to define new value (unless force", "import sys SELENIUM_LOG_LEVEL = logging.INFO SCHEME = 'https' PORT =", "You may obtain a copy of the License at #", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "BROWSER_HEIGHT = 1024 def update_value(key_name, value, force=False): \"\"\" Update single", "BROWSER_VERSION = '' BROWSER_PLATFORM = 'ANY' SELENIUM_SERVER = None SELENIUM_PORT", "'ANY' SELENIUM_SERVER = None SELENIUM_PORT = 4444 BROWSER_WIDTH = 1280", "config module. \"\"\" this_module = sys.modules[__name__] key_name = key_name.upper() #", "required by applicable law or agreed to in writing, software", "define new value (unless force is used) if not force:", "webstr selenium tests. 
This module provides configuration options along with", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "SELENIUM_PORT = 4444 BROWSER_WIDTH = 1280 BROWSER_HEIGHT = 1024 def", "new value (unless force is used) if not force: getattr(this_module,", "provides configuration options along with default values and function to", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "= None SELENIUM_PORT = 4444 BROWSER_WIDTH = 1280 BROWSER_HEIGHT =", "the Apache License, Version 2.0 (the \"License\"); # you may", "force=False): \"\"\" Update single value of this config module. \"\"\"", "if we try to define new value (unless force is", "import logging import sys SELENIUM_LOG_LEVEL = logging.INFO SCHEME = 'https'" ]
[ "__call__(self, population): chroms_to_mutate = random.sample( population, round(self.chrom_mut_chance * len(population))) for", "gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance = gen_mut_chance def __call__(self, population):", "chrom in chroms_to_mutate: genes_to_mutate = random.sample( range(len(chrom)), round(self.gen_mut_chance * len(chrom)))", "round(self.chrom_mut_chance * len(population))) for chrom in chroms_to_mutate: genes_to_mutate = random.sample(", "def __init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance = gen_mut_chance", "random.sample( range(len(chrom)), round(self.gen_mut_chance * len(chrom))) for gt in genes_to_mutate: chrom[gt]", "class Mutation: def __init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance", "def __call__(self, population): chroms_to_mutate = random.sample( population, round(self.chrom_mut_chance * len(population)))", "gen_mut_chance def __call__(self, population): chroms_to_mutate = random.sample( population, round(self.chrom_mut_chance *", "random.sample( population, round(self.chrom_mut_chance * len(population))) for chrom in chroms_to_mutate: genes_to_mutate", "= chrom_mut_chance self.gen_mut_chance = gen_mut_chance def __call__(self, population): chroms_to_mutate =", "population): chroms_to_mutate = random.sample( population, round(self.chrom_mut_chance * len(population))) for chrom", "* len(chrom))) for gt in genes_to_mutate: chrom[gt] = int(not bool(chrom[gt]))", "range(len(chrom)), round(self.gen_mut_chance * len(chrom))) for gt in genes_to_mutate: chrom[gt] =", "import random class Mutation: def __init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance =", "chroms_to_mutate = random.sample( population, round(self.chrom_mut_chance * len(population))) for chrom in", "= gen_mut_chance def __call__(self, population): chroms_to_mutate = random.sample( population, 
round(self.chrom_mut_chance", "Mutation: def __init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance =", "<filename>operations/mutations/mutation.py import random class Mutation: def __init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance", "genes_to_mutate = random.sample( range(len(chrom)), round(self.gen_mut_chance * len(chrom))) for gt in", "* len(population))) for chrom in chroms_to_mutate: genes_to_mutate = random.sample( range(len(chrom)),", "= random.sample( population, round(self.chrom_mut_chance * len(population))) for chrom in chroms_to_mutate:", "chroms_to_mutate: genes_to_mutate = random.sample( range(len(chrom)), round(self.gen_mut_chance * len(chrom))) for gt", "len(population))) for chrom in chroms_to_mutate: genes_to_mutate = random.sample( range(len(chrom)), round(self.gen_mut_chance", "random class Mutation: def __init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance", "population, round(self.chrom_mut_chance * len(population))) for chrom in chroms_to_mutate: genes_to_mutate =", "chrom_mut_chance self.gen_mut_chance = gen_mut_chance def __call__(self, population): chroms_to_mutate = random.sample(", "= random.sample( range(len(chrom)), round(self.gen_mut_chance * len(chrom))) for gt in genes_to_mutate:", "in chroms_to_mutate: genes_to_mutate = random.sample( range(len(chrom)), round(self.gen_mut_chance * len(chrom))) for", "chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance = gen_mut_chance def __call__(self,", "round(self.gen_mut_chance * len(chrom))) for gt in genes_to_mutate: chrom[gt] = int(not", "for gt in genes_to_mutate: chrom[gt] = int(not bool(chrom[gt])) return population", "len(chrom))) for gt in genes_to_mutate: chrom[gt] = int(not bool(chrom[gt])) return", "__init__(self, chrom_mut_chance, gen_mut_chance): self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance = gen_mut_chance 
def", "for chrom in chroms_to_mutate: genes_to_mutate = random.sample( range(len(chrom)), round(self.gen_mut_chance *", "self.gen_mut_chance = gen_mut_chance def __call__(self, population): chroms_to_mutate = random.sample( population,", "self.chrom_mut_chance = chrom_mut_chance self.gen_mut_chance = gen_mut_chance def __call__(self, population): chroms_to_mutate" ]
[ "for i in range(len(chars)): if (i + 1) % 20", "* [0] # For each lowercase letter in the list,", "displayCounts(counts) def createList(): \"\"\"Create a list of characters.\"\"\" # Create", "def createList(): \"\"\"Create a list of characters.\"\"\" # Create an", "counts = 26 * [0] # For each lowercase letter", "26 integers with initial value 0 counts = 26 *", "range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the list return chars def displayList(chars):", "letter in the list, count it for i in range(len(chars)):", "+ ord('a')), end=' ') print() main() # Call the main", "Display the list print(\"The lowercase letters are:\") displayList(chars) # Count", "end=' ') def countLetters(chars): \"\"\"Count the occurrences of each letter.\"\"\"", "Display the characters in the list 20 on each line", "of characters.\"\"\" # Create an empty list chars = []", "displayCounts(counts): \"\"\"Display counts.\"\"\" for i in range(len(counts)): if (i +", "main(): \"\"\"Main.\"\"\" # Create a list of characters chars =", "1 return counts def displayCounts(counts): \"\"\"Display counts.\"\"\" for i in", "in range(len(chars)): if (i + 1) % 20 == 0:", "else: print(chars[i], end=' ') def countLetters(chars): \"\"\"Count the occurrences of", "% 10 == 0: print(counts[i], chr(i + ord('a'))) else: print(counts[i],", "the list 20 on each line for i in range(len(chars)):", "# Create an empty list chars = [] # Create", "print(\"The occurrences of each letter are:\") displayCounts(counts) def createList(): \"\"\"Create", "characters.\"\"\" # Create an empty list chars = [] #", "count it for i in range(len(chars)): counts[ord(chars[i]) - ord('a')] +=", "list, count it for i in range(len(chars)): counts[ord(chars[i]) - ord('a')]", "<filename>examples/CountLettersInList.py import RandomCharacter # Defined in Listing 6.9 def main():", "the list, count it for i in range(len(chars)): counts[ord(chars[i]) -", "lowercase letters randomly and add them to the list for", "# 
Display the characters in the list 20 on each", "displayList(chars): \"\"\"Display the list of characters.\"\"\" # Display the characters", "a list of 26 integers with initial value 0 counts", "Create a list of characters chars = createList() # Display", "[] # Create lowercase letters randomly and add them to", "\"\"\"Display the list of characters.\"\"\" # Display the characters in", "in the list, count it for i in range(len(chars)): counts[ord(chars[i])", "# Return the list return chars def displayList(chars): \"\"\"Display the", "1) % 20 == 0: print(chars[i]) else: print(chars[i], end=' ')", "20 on each line for i in range(len(chars)): if (i", "Create an empty list chars = [] # Create lowercase", "each lowercase letter in the list, count it for i", "def displayList(chars): \"\"\"Display the list of characters.\"\"\" # Display the", "letters randomly and add them to the list for i", "in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the list return chars def", "in range(len(counts)): if (i + 1) % 10 == 0:", "return counts def displayCounts(counts): \"\"\"Display counts.\"\"\" for i in range(len(counts)):", "For each lowercase letter in the list, count it for", "- ord('a')] += 1 return counts def displayCounts(counts): \"\"\"Display counts.\"\"\"", "# Defined in Listing 6.9 def main(): \"\"\"Main.\"\"\" # Create", "on each line for i in range(len(chars)): if (i +", "# For each lowercase letter in the list, count it", "chars = [] # Create lowercase letters randomly and add", "in Listing 6.9 def main(): \"\"\"Main.\"\"\" # Create a list", "createList() # Display the list print(\"The lowercase letters are:\") displayList(chars)", "= createList() # Display the list print(\"The lowercase letters are:\")", "occurrences of each letter counts = countLetters(chars) # Display counts", "to the list for i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return", "counts def displayCounts(counts): \"\"\"Display 
counts.\"\"\" for i in range(len(counts)): if", "each letter are:\") displayCounts(counts) def createList(): \"\"\"Create a list of", "letter are:\") displayCounts(counts) def createList(): \"\"\"Create a list of characters.\"\"\"", "randomly and add them to the list for i in", "') def countLetters(chars): \"\"\"Count the occurrences of each letter.\"\"\" #", "of characters chars = createList() # Display the list print(\"The", "for i in range(len(counts)): if (i + 1) % 10", "list return chars def displayList(chars): \"\"\"Display the list of characters.\"\"\"", "chars def displayList(chars): \"\"\"Display the list of characters.\"\"\" # Display", "the occurrences of each letter.\"\"\" # Create a list of", "displayList(chars) # Count the occurrences of each letter counts =", "value 0 counts = 26 * [0] # For each", "occurrences of each letter are:\") displayCounts(counts) def createList(): \"\"\"Create a", "add them to the list for i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter())", "list print(\"The lowercase letters are:\") displayList(chars) # Count the occurrences", "\"\"\"Main.\"\"\" # Create a list of characters chars = createList()", "list of characters.\"\"\" # Display the characters in the list", "list of 26 integers with initial value 0 counts =", "20 == 0: print(chars[i]) else: print(chars[i], end=' ') def countLetters(chars):", "10 == 0: print(counts[i], chr(i + ord('a'))) else: print(counts[i], chr(i", "each letter.\"\"\" # Create a list of 26 integers with", "are:\") displayCounts(counts) def createList(): \"\"\"Create a list of characters.\"\"\" #", "ord('a')] += 1 return counts def displayCounts(counts): \"\"\"Display counts.\"\"\" for", "== 0: print(chars[i]) else: print(chars[i], end=' ') def countLetters(chars): \"\"\"Count", "+ 1) % 10 == 0: print(counts[i], chr(i + ord('a')))", "Create lowercase letters randomly and add them to the list", "and add them to the list for i in range(100):", "0: print(counts[i], chr(i + 
ord('a'))) else: print(counts[i], chr(i + ord('a')),", "(i + 1) % 20 == 0: print(chars[i]) else: print(chars[i],", "of each letter are:\") displayCounts(counts) def createList(): \"\"\"Create a list", "list 20 on each line for i in range(len(chars)): if", "lowercase letter in the list, count it for i in", "for i in range(len(chars)): counts[ord(chars[i]) - ord('a')] += 1 return", "def displayCounts(counts): \"\"\"Display counts.\"\"\" for i in range(len(counts)): if (i", "+ 1) % 20 == 0: print(chars[i]) else: print(chars[i], end='", "list chars = [] # Create lowercase letters randomly and", "def main(): \"\"\"Main.\"\"\" # Create a list of characters chars", "return chars def displayList(chars): \"\"\"Display the list of characters.\"\"\" #", "26 * [0] # For each lowercase letter in the", "for i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the list return", "if (i + 1) % 20 == 0: print(chars[i]) else:", "# Count the occurrences of each letter counts = countLetters(chars)", "of each letter.\"\"\" # Create a list of 26 integers", "chars = createList() # Display the list print(\"The lowercase letters", "empty list chars = [] # Create lowercase letters randomly", "an empty list chars = [] # Create lowercase letters", "def countLetters(chars): \"\"\"Count the occurrences of each letter.\"\"\" # Create", "letter counts = countLetters(chars) # Display counts print(\"The occurrences of", "chr(i + ord('a'))) else: print(counts[i], chr(i + ord('a')), end=' ')", "chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the list return chars def displayList(chars): \"\"\"Display", "0 counts = 26 * [0] # For each lowercase", "of 26 integers with initial value 0 counts = 26", "letter.\"\"\" # Create a list of 26 integers with initial", "range(len(chars)): if (i + 1) % 20 == 0: print(chars[i])", "i in range(len(chars)): if (i + 1) % 20 ==", "[0] # For each lowercase letter in the list, count", "# Display the list print(\"The lowercase 
letters are:\") displayList(chars) #", "# Create a list of 26 integers with initial value", "= countLetters(chars) # Display counts print(\"The occurrences of each letter", "a list of characters chars = createList() # Display the", "each line for i in range(len(chars)): if (i + 1)", "# Display counts print(\"The occurrences of each letter are:\") displayCounts(counts)", "list of characters chars = createList() # Display the list", "initial value 0 counts = 26 * [0] # For", "the list for i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the", "ord('a')), end=' ') print() main() # Call the main function", "+= 1 return counts def displayCounts(counts): \"\"\"Display counts.\"\"\" for i", "lowercase letters are:\") displayList(chars) # Count the occurrences of each", "Count the occurrences of each letter counts = countLetters(chars) #", "list for i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the list", "+ ord('a'))) else: print(counts[i], chr(i + ord('a')), end=' ') print()", "range(len(counts)): if (i + 1) % 10 == 0: print(counts[i],", "print(counts[i], chr(i + ord('a')), end=' ') print() main() # Call", "# Create lowercase letters randomly and add them to the", "Create a list of 26 integers with initial value 0", "(i + 1) % 10 == 0: print(counts[i], chr(i +", "if (i + 1) % 10 == 0: print(counts[i], chr(i", "== 0: print(counts[i], chr(i + ord('a'))) else: print(counts[i], chr(i +", "else: print(counts[i], chr(i + ord('a')), end=' ') print() main() #", "characters in the list 20 on each line for i", "line for i in range(len(chars)): if (i + 1) %", "with initial value 0 counts = 26 * [0] #", "in range(len(chars)): counts[ord(chars[i]) - ord('a')] += 1 return counts def", "chr(i + ord('a')), end=' ') print() main() # Call the", "\"\"\"Count the occurrences of each letter.\"\"\" # Create a list", "the list print(\"The lowercase letters are:\") displayList(chars) # Count the", "of characters.\"\"\" # 
Display the characters in the list 20", "import RandomCharacter # Defined in Listing 6.9 def main(): \"\"\"Main.\"\"\"", "\"\"\"Display counts.\"\"\" for i in range(len(counts)): if (i + 1)", "createList(): \"\"\"Create a list of characters.\"\"\" # Create an empty", "characters.\"\"\" # Display the characters in the list 20 on", "6.9 def main(): \"\"\"Main.\"\"\" # Create a list of characters", "the occurrences of each letter counts = countLetters(chars) # Display", "a list of characters.\"\"\" # Create an empty list chars", "the characters in the list 20 on each line for", "ord('a'))) else: print(counts[i], chr(i + ord('a')), end=' ') print() main()", "i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) # Return the list return chars", "of each letter counts = countLetters(chars) # Display counts print(\"The", "each letter counts = countLetters(chars) # Display counts print(\"The occurrences", "the list of characters.\"\"\" # Display the characters in the", "print(\"The lowercase letters are:\") displayList(chars) # Count the occurrences of", "= 26 * [0] # For each lowercase letter in", "= [] # Create lowercase letters randomly and add them", "RandomCharacter # Defined in Listing 6.9 def main(): \"\"\"Main.\"\"\" #", "integers with initial value 0 counts = 26 * [0]", "\"\"\"Create a list of characters.\"\"\" # Create an empty list", "% 20 == 0: print(chars[i]) else: print(chars[i], end=' ') def", "# Create a list of characters chars = createList() #", "range(len(chars)): counts[ord(chars[i]) - ord('a')] += 1 return counts def displayCounts(counts):", "in the list 20 on each line for i in", "i in range(len(chars)): counts[ord(chars[i]) - ord('a')] += 1 return counts", "characters chars = createList() # Display the list print(\"The lowercase", "i in range(len(counts)): if (i + 1) % 10 ==", "counts print(\"The occurrences of each letter are:\") displayCounts(counts) def createList():", "counts[ord(chars[i]) - ord('a')] += 1 return counts def 
displayCounts(counts): \"\"\"Display", "0: print(chars[i]) else: print(chars[i], end=' ') def countLetters(chars): \"\"\"Count the", "print(chars[i], end=' ') def countLetters(chars): \"\"\"Count the occurrences of each", "countLetters(chars) # Display counts print(\"The occurrences of each letter are:\")", "Defined in Listing 6.9 def main(): \"\"\"Main.\"\"\" # Create a", "occurrences of each letter.\"\"\" # Create a list of 26", "print(counts[i], chr(i + ord('a'))) else: print(counts[i], chr(i + ord('a')), end='", "them to the list for i in range(100): chars.append(RandomCharacter.getRandomLowerCaseLetter()) #", "letters are:\") displayList(chars) # Count the occurrences of each letter", "Listing 6.9 def main(): \"\"\"Main.\"\"\" # Create a list of", "Display counts print(\"The occurrences of each letter are:\") displayCounts(counts) def", "Return the list return chars def displayList(chars): \"\"\"Display the list", "print(chars[i]) else: print(chars[i], end=' ') def countLetters(chars): \"\"\"Count the occurrences", "it for i in range(len(chars)): counts[ord(chars[i]) - ord('a')] += 1", "1) % 10 == 0: print(counts[i], chr(i + ord('a'))) else:", "countLetters(chars): \"\"\"Count the occurrences of each letter.\"\"\" # Create a", "counts = countLetters(chars) # Display counts print(\"The occurrences of each", "list of characters.\"\"\" # Create an empty list chars =", "counts.\"\"\" for i in range(len(counts)): if (i + 1) %", "the list return chars def displayList(chars): \"\"\"Display the list of", "are:\") displayList(chars) # Count the occurrences of each letter counts" ]
[ "you can use the ``Config`` API:: from ddtrace import config,", "To configure the Vertica integration globally you can use the", "# override the service and tracer to be used Pin.override(conn,", "missing_modules: from .patch import patch, unpatch __all__ = [patch, unpatch]", "use the ``patch`` function. Note the ordering of the following", "patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' To configure the Vertica integration on", "conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service and tracer to", "Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service and tracer", "from ...utils.importlib import require_modules required_modules = ['vertica_python'] with require_modules(required_modules) as", "\"\"\" The Vertica integration will trace queries made using the", "ddtrace import patch patch(vertica=True) import vertica_python # use vertica_python like", "the ``ls-trace-run`` command. Vertica is instrumented on import. To instrument", "use the ``Pin`` API:: from ddtrace import Pin, patch, Tracer", "using the vertica-python library. Vertica will be automatically instrumented with", "function. Note the ordering of the following statements:: from ddtrace", "instrument Vertica manually use the ``patch`` function. Note the ordering", "\"\"\" from ...utils.importlib import require_modules required_modules = ['vertica_python'] with require_modules(required_modules)", "Vertica integration on an instance-per-instance basis use the ``Pin`` API::", "will be automatically instrumented with ``patch_all``, or when using the", "globally you can use the ``Config`` API:: from ddtrace import", "will trace queries made using the vertica-python library. Vertica will", "config, patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' To configure the Vertica", "from ddtrace import config, patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' To", "command. 
Vertica is instrumented on import. To instrument Vertica manually", "tracer=custom_tracer) \"\"\" from ...utils.importlib import require_modules required_modules = ['vertica_python'] with", "the following statements:: from ddtrace import patch patch(vertica=True) import vertica_python", "Vertica is instrumented on import. To instrument Vertica manually use", "config.vertica['service_name'] = 'my-vertica-database' To configure the Vertica integration on an", "vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service and tracer to be used", "library. Vertica will be automatically instrumented with ``patch_all``, or when", "and tracer to be used Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\" from", "required_modules = ['vertica_python'] with require_modules(required_modules) as missing_modules: if not missing_modules:", "automatically instrumented with ``patch_all``, or when using the ``ls-trace-run`` command.", "or when using the ``ls-trace-run`` command. Vertica is instrumented on", "vertica_python like usual To configure the Vertica integration globally you", "basis use the ``Pin`` API:: from ddtrace import Pin, patch,", "used Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\" from ...utils.importlib import require_modules required_modules", "manually use the ``patch`` function. Note the ordering of the", "import vertica_python custom_tracer = Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override", "import require_modules required_modules = ['vertica_python'] with require_modules(required_modules) as missing_modules: if", "API:: from ddtrace import config, patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database'", "import. To instrument Vertica manually use the ``patch`` function. Note", "with ``patch_all``, or when using the ``ls-trace-run`` command. 
Vertica is", "use vertica_python like usual To configure the Vertica integration globally", "'my-vertica-database' To configure the Vertica integration on an instance-per-instance basis", "queries made using the vertica-python library. Vertica will be automatically", "trace queries made using the vertica-python library. Vertica will be", "require_modules(required_modules) as missing_modules: if not missing_modules: from .patch import patch,", "= ['vertica_python'] with require_modules(required_modules) as missing_modules: if not missing_modules: from", "like usual To configure the Vertica integration globally you can", "import config, patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' To configure the", "vertica_python custom_tracer = Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the", "integration globally you can use the ``Config`` API:: from ddtrace", "be used Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\" from ...utils.importlib import require_modules", "from ddtrace import Pin, patch, Tracer patch(vertica=True) import vertica_python custom_tracer", "tracer to be used Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\" from ...utils.importlib", "usual To configure the Vertica integration globally you can use", "ddtrace import Pin, patch, Tracer patch(vertica=True) import vertica_python custom_tracer =", "following statements:: from ddtrace import patch patch(vertica=True) import vertica_python #", "configure the Vertica integration on an instance-per-instance basis use the", "missing_modules: if not missing_modules: from .patch import patch, unpatch __all__", "integration on an instance-per-instance basis use the ``Pin`` API:: from", "ddtrace import config, patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' To configure", "integration will trace queries made using the vertica-python library. 
Vertica", "require_modules required_modules = ['vertica_python'] with require_modules(required_modules) as missing_modules: if not", "Vertica integration globally you can use the ``Config`` API:: from", "instance-per-instance basis use the ``Pin`` API:: from ddtrace import Pin,", "...utils.importlib import require_modules required_modules = ['vertica_python'] with require_modules(required_modules) as missing_modules:", "service and tracer to be used Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\"", "is instrumented on import. To instrument Vertica manually use the", "the ``Config`` API:: from ddtrace import config, patch patch(vertica=True) config.vertica['service_name']", "API:: from ddtrace import Pin, patch, Tracer patch(vertica=True) import vertica_python", "import Pin, patch, Tracer patch(vertica=True) import vertica_python custom_tracer = Tracer()", "on import. To instrument Vertica manually use the ``patch`` function.", "``Pin`` API:: from ddtrace import Pin, patch, Tracer patch(vertica=True) import", "statements:: from ddtrace import patch patch(vertica=True) import vertica_python # use", "configure the Vertica integration globally you can use the ``Config``", "service='myverticaservice', tracer=custom_tracer) \"\"\" from ...utils.importlib import require_modules required_modules = ['vertica_python']", "custom_tracer = Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service", "the service and tracer to be used Pin.override(conn, service='myverticaservice', tracer=custom_tracer)", "using the ``ls-trace-run`` command. Vertica is instrumented on import. To", "patch(vertica=True) import vertica_python # use vertica_python like usual To configure", "patch(vertica=True) import vertica_python custom_tracer = Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) #", "vertica-python library. 
Vertica will be automatically instrumented with ``patch_all``, or", "['vertica_python'] with require_modules(required_modules) as missing_modules: if not missing_modules: from .patch", "vertica_python # use vertica_python like usual To configure the Vertica", "# use vertica_python like usual To configure the Vertica integration", "Note the ordering of the following statements:: from ddtrace import", "with require_modules(required_modules) as missing_modules: if not missing_modules: from .patch import", "To instrument Vertica manually use the ``patch`` function. Note the", "not missing_modules: from .patch import patch, unpatch __all__ = [patch,", "``Config`` API:: from ddtrace import config, patch patch(vertica=True) config.vertica['service_name'] =", "made using the vertica-python library. Vertica will be automatically instrumented", "when using the ``ls-trace-run`` command. Vertica is instrumented on import.", "import patch patch(vertica=True) import vertica_python # use vertica_python like usual", "= 'my-vertica-database' To configure the Vertica integration on an instance-per-instance", "patch, Tracer patch(vertica=True) import vertica_python custom_tracer = Tracer() conn =", "patch patch(vertica=True) import vertica_python # use vertica_python like usual To", "instrumented on import. To instrument Vertica manually use the ``patch``", "To configure the Vertica integration on an instance-per-instance basis use", "the ``Pin`` API:: from ddtrace import Pin, patch, Tracer patch(vertica=True)", "Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\" from ...utils.importlib import require_modules required_modules =", "the Vertica integration on an instance-per-instance basis use the ``Pin``", "Pin, patch, Tracer patch(vertica=True) import vertica_python custom_tracer = Tracer() conn", "the vertica-python library. Vertica will be automatically instrumented with ``patch_all``,", "``patch_all``, or when using the ``ls-trace-run`` command. 
Vertica is instrumented", "be automatically instrumented with ``patch_all``, or when using the ``ls-trace-run``", "Vertica manually use the ``patch`` function. Note the ordering of", "ordering of the following statements:: from ddtrace import patch patch(vertica=True)", "can use the ``Config`` API:: from ddtrace import config, patch", "of the following statements:: from ddtrace import patch patch(vertica=True) import", "override the service and tracer to be used Pin.override(conn, service='myverticaservice',", "= vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service and tracer to be", "an instance-per-instance basis use the ``Pin`` API:: from ddtrace import", "the Vertica integration globally you can use the ``Config`` API::", "import vertica_python # use vertica_python like usual To configure the", "from ddtrace import patch patch(vertica=True) import vertica_python # use vertica_python", "Vertica will be automatically instrumented with ``patch_all``, or when using", "``ls-trace-run`` command. Vertica is instrumented on import. To instrument Vertica", "= Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG) # override the service and", "to be used Pin.override(conn, service='myverticaservice', tracer=custom_tracer) \"\"\" from ...utils.importlib import", "Vertica integration will trace queries made using the vertica-python library.", "the ``patch`` function. Note the ordering of the following statements::", "use the ``Config`` API:: from ddtrace import config, patch patch(vertica=True)", "instrumented with ``patch_all``, or when using the ``ls-trace-run`` command. Vertica", "``patch`` function. 
Note the ordering of the following statements:: from", "Tracer patch(vertica=True) import vertica_python custom_tracer = Tracer() conn = vertica_python.connect(**YOUR_VERTICA_CONFIG)", "if not missing_modules: from .patch import patch, unpatch __all__ =", "as missing_modules: if not missing_modules: from .patch import patch, unpatch", "on an instance-per-instance basis use the ``Pin`` API:: from ddtrace", "The Vertica integration will trace queries made using the vertica-python", "patch patch(vertica=True) config.vertica['service_name'] = 'my-vertica-database' To configure the Vertica integration", "the ordering of the following statements:: from ddtrace import patch" ]
[ "/>', ] def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data) self.data = data", "OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected = [] self.failUnlessEqual(expected, actual) def pyUnitTests(): return", "self.data) expected = [] self.failUnlessEqual(expected, actual) def pyUnitTests(): return datadriven.loadTests(__name__)", "datadriven class BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [ '', \"http://not.in.a.link.tag/\", '<link rel=\"openid.server\"", "self.data = data def runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected", "data) self.data = data def runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data)", "href=\"not.in.html.or.head\" />', ] def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data) self.data =", "datadriven.DataDrivenTestCase.__init__(self, data) self.data = data def runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/',", "cases = [ '', \"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\" />', ]", "def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data) self.data = data def runOneTest(self):", "[ '', \"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\" />', ] def __init__(self,", "= data def runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected =", "def runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected = [] self.failUnlessEqual(expected,", "= OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected = [] self.failUnlessEqual(expected, actual) def pyUnitTests():", "] def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data) self.data = data def", "BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [ '', 
\"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\" />',", "= [ '', \"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\" />', ] def", "runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected = [] self.failUnlessEqual(expected, actual)", "class BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [ '', \"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\"", "import datadriven class BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [ '', \"http://not.in.a.link.tag/\", '<link", "import OpenIDServiceEndpoint import datadriven class BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [ '',", "<filename>desktop/core/ext-py/python-openid-2.2.5/openid/test/test_htmldiscover.py from openid.consumer.discover import OpenIDServiceEndpoint import datadriven class BadLinksTestCase(datadriven.DataDrivenTestCase): cases", "'', \"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\" />', ] def __init__(self, data):", "actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected = [] self.failUnlessEqual(expected, actual) def", "from openid.consumer.discover import OpenIDServiceEndpoint import datadriven class BadLinksTestCase(datadriven.DataDrivenTestCase): cases =", "__init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data) self.data = data def runOneTest(self): actual", "openid.consumer.discover import OpenIDServiceEndpoint import datadriven class BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [", "rel=\"openid.server\" href=\"not.in.html.or.head\" />', ] def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data) self.data", "data def runOneTest(self): actual = OpenIDServiceEndpoint.fromHTML('http://unused.url/', self.data) expected = []", "OpenIDServiceEndpoint import datadriven class 
BadLinksTestCase(datadriven.DataDrivenTestCase): cases = [ '', \"http://not.in.a.link.tag/\",", "\"http://not.in.a.link.tag/\", '<link rel=\"openid.server\" href=\"not.in.html.or.head\" />', ] def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self,", "data): datadriven.DataDrivenTestCase.__init__(self, data) self.data = data def runOneTest(self): actual =", "'<link rel=\"openid.server\" href=\"not.in.html.or.head\" />', ] def __init__(self, data): datadriven.DataDrivenTestCase.__init__(self, data)" ]
[ "TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND", "= get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL =", ") EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER')", "EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL", "= 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD =", "True SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader',", "'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER", "= get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS =", "'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587", "get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587 EMAIL_USE_TLS = True", "= get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587 EMAIL_USE_TLS =", "'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD')", "= ( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND =", ")), ) EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER =", "from .base 
import * SECRET_KEY = get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True", "SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader',", "<reponame>chiehtu/kissaten<gh_stars>0 from .base import * SECRET_KEY = get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE =", "import * SECRET_KEY = get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE =", "EMAIL_PORT = 587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL = '' USERENA_USE_HTTPS", "= True SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', (", "EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT", "( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'", "EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD", "get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS = (", "('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST", "= 587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL = '' USERENA_USE_HTTPS =", "( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST =", "SECRET_KEY = get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS", ".base import * SECRET_KEY = get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True 
SESSION_COOKIE_SECURE", "get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL = ''", "'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), ) EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'smtp.gmail.com'", "= 'smtp.gmail.com' EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT =", "= True TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )),", "* SECRET_KEY = get_env_var('SECRET_KEY') CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE = True", "EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD') EMAIL_PORT = 587 EMAIL_USE_TLS", "587 EMAIL_USE_TLS = True DEFAULT_FROM_EMAIL = '' USERENA_USE_HTTPS = True", "CSRF_COOKIE_SECURE = True SESSION_COOKIE_SECURE = True TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader',", "True TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', )), )" ]
[ "# df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max() <= 0:", "df.columns states = [] g.attr('node', shape='ellipse') for column_name in column_names:", "def generate_react_flow_chart_from_df(df): column_names = df.columns nodes = {} # Elipses", "edges.append([state, name1, 'one_way']) elif df[column_name].min() >= 0 and df[column_name].max() >", "edges.append([name1, state, 'both']) return dict(nodes=list(nodes.values()), edges=edges) def main(args): df =", "if df[column_name].min() < 0 and df[column_name].max() <= 0: edges.append([state, name1,", "format=graph_format) g.attr(overlap='false') g.attr(splines='true') column_names = df.columns states = [] g.attr('node',", "'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box') edges = [] for column_name", "generate_graph(df) generate_react_flow_chart_from_df(df) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Generate Graph", "0: g.edge(state, parts[0]) elif df[column_name].min() >= 0 and df[column_name].max() >", "if column_name[:6] == 'state_': nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse') # Boxes", "graph_format='pdf'): g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false') g.attr(splines='true') column_names", "def generate_react_flow_chart(outputs): df = pd.DataFrame() for key, value in outputs['output_states'].items():", "value['data'] return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names = df.columns nodes =", "0 and df[column_name].max() <= 0: g.edge(state, parts[0]) elif df[column_name].min() >=", "column_name[:6] != 'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for column_name in column_names:", "df[column_name].min() >= 0 and df[column_name].max() > 0: edges.append([name1, state, 'one_way'])", "parser.add_argument('-f', '--output_file', 
type=str, help='The output file to generate a graph", "column_name in column_names: if column_name[:6] != 'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0],", "dict(nodes=list(nodes.values()), edges=edges) def main(args): df = pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df)", "= pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df) if __name__ == '__main__': parser", "name1, 'one_way']) elif df[column_name].min() >= 0 and df[column_name].max() > 0:", "column_name[:6] != 'state_': parts = column_name.split('_') name1 = parts[0] state", "graphviz not installed\") def generate_graph(df, graph_format='pdf'): g = Digraph('ModelFlow', filename='modelflow.gv',", "0 and df[column_name].max() > 0: edges.append([name1, state, 'one_way']) else: edges.append([name1,", "column_names: if column_name[:6] != 'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for column_name", "if graph_format == 'json': # TODO: THIS DOES NOT WORK", "except: print(\"Note: Optional graphviz not installed\") def generate_graph(df, graph_format='pdf'): g", "Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false') g.attr(splines='true') column_names = df.columns states", "column_name)) g.node(column_name.split('_')[0]) for column_name in column_names: if column_name[:6] != 'state_':", "and df[column_name].max() <= 0: edges.append([state, name1, 'one_way']) elif df[column_name].min() >=", "for column_name in column_names: if column_name[:6] == 'state_': states.append((column_name[6:], column_name))", "parts[0]) if graph_format == 'json': # TODO: THIS DOES NOT", "'__main__': parser = argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f', '--output_file', type=str, help='The", "engine='neato', format=graph_format) g.attr(overlap='false') g.attr(splines='true') column_names = 
df.columns states = []", "import argparse import json try: from graphviz import Digraph except:", "0 and df[column_name].max() > 0: g.edge(parts[0], state) else: g.edge(parts[0], state)", "states = [] g.attr('node', shape='ellipse') for column_name in column_names: if", "parts = column_name.split('_') state = '_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(), df[column_name].max())", "<= 0: g.edge(state, parts[0]) elif df[column_name].min() >= 0 and df[column_name].max()", "pd import argparse import json try: from graphviz import Digraph", "== 'json': # TODO: THIS DOES NOT WORK FOR MULTIPLE", "0 and df[column_name].max() <= 0: edges.append([state, name1, 'one_way']) elif df[column_name].min()", "> 0: edges.append([name1, state, 'one_way']) else: edges.append([name1, state, 'both']) return", "df[column_name].max() > 0: edges.append([name1, state, 'one_way']) else: edges.append([name1, state, 'both'])", "column_name)) g.node(column_name[6:]) models = [] g.attr('node', shape='box') for column_name in", "generate_react_flow_chart_from_df(df) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Generate Graph Viz')", "main(args): df = pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df) if __name__ ==", ">= 0 and df[column_name].max() > 0: edges.append([name1, state, 'one_way']) else:", "shape='box') for column_name in column_names: if column_name[:6] != 'state_': models.append((column_name.split('_')[0],", "if column_name[:6] != 'state_': parts = column_name.split('_') name1 = parts[0]", ">= 0 and df[column_name].max() > 0: g.edge(parts[0], state) else: g.edge(parts[0],", "column_name[:6] == 'state_': nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse') # Boxes for", "shape='ellipse') for column_name in column_names: if column_name[:6] == 'state_': states.append((column_name[6:],", "in column_names: if column_name[:6] != 'state_': parts = column_name.split('_') name1", "MULTIPLE 
MODELFLOWS with open('modelflow.gv.json', 'r') as f: return json.load(f) else:", "!= 'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for column_name in column_names: if", "Optional graphviz not installed\") def generate_graph(df, graph_format='pdf'): g = Digraph('ModelFlow',", "= column_name.split('_') state = '_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(), df[column_name].max()) if", "MODELFLOWS with open('modelflow.gv.json', 'r') as f: return json.load(f) else: g.view()", "= '_'.join(parts[1:])[6:-7] # print(name1, state, df[column_name].min(), # df[column_name].max()) if df[column_name].min()", "edges=edges) def main(args): df = pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df) if", "if column_name[:6] != 'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for column_name in", "not installed\") def generate_graph(df, graph_format='pdf'): g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato',", "= '_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(), df[column_name].max()) if df[column_name].min() < 0", "state, 'one_way']) else: edges.append([name1, state, 'both']) return dict(nodes=list(nodes.values()), edges=edges) def", "g.node(column_name.split('_')[0]) for column_name in column_names: if column_name[:6] != 'state_': parts", "column_names: if column_name[:6] != 'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box') edges", "column_names: if column_name[:6] != 'state_': parts = column_name.split('_') name1 =", "if __name__ == '__main__': parser = argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f',", "= [] for column_name in column_names: if column_name[:6] != 'state_':", "g.edge(parts[0], state) g.edge(state, parts[0]) if graph_format == 'json': # TODO:", "name1 = parts[0] state = 
'_'.join(parts[1:])[6:-7] # print(name1, state, df[column_name].min(),", "f: return json.load(f) else: g.view() def generate_react_flow_chart(outputs): df = pd.DataFrame()", "output file to generate a graph of', required=True) args =", "if column_name[:6] == 'state_': states.append((column_name[6:], column_name)) g.node(column_name[6:]) models = []", "print(name1, state, df[column_name].min(), # df[column_name].max()) if df[column_name].min() < 0 and", "= column_name.split('_') name1 = parts[0] state = '_'.join(parts[1:])[6:-7] # print(name1,", "# TODO: THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS with", "< 0 and df[column_name].max() <= 0: edges.append([state, name1, 'one_way']) elif", "value in outputs['output_states'].items(): df[key] = value['data'] return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df):", "= df.columns nodes = {} # Elipses for column_name in", "column_names: if column_name[:6] != 'state_': parts = column_name.split('_') state =", "# Elipses for column_name in column_names: if column_name[:6] == 'state_':", "nodes = {} # Elipses for column_name in column_names: if", "'state_': parts = column_name.split('_') state = '_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(),", "df[key] = value['data'] return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names = df.columns", "and df[column_name].max() <= 0: g.edge(state, parts[0]) elif df[column_name].min() >= 0", "return json.load(f) else: g.view() def generate_react_flow_chart(outputs): df = pd.DataFrame() for", "state) else: g.edge(parts[0], state) g.edge(state, parts[0]) if graph_format == 'json':", "__name__ == '__main__': parser = argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f', '--output_file',", "for column_name in column_names: if column_name[:6] != 'state_': models.append((column_name.split('_')[0], column_name))", "in column_names: if column_name[:6] == 'state_': 
states.append((column_name[6:], column_name)) g.node(column_name[6:]) models", "in column_names: if column_name[:6] != 'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for", "g.attr('node', shape='ellipse') for column_name in column_names: if column_name[:6] == 'state_':", "generate_react_flow_chart(outputs): df = pd.DataFrame() for key, value in outputs['output_states'].items(): df[key]", "return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names = df.columns nodes = {}", "'_'.join(parts[1:])[6:-7] # print(name1, state, df[column_name].min(), # df[column_name].max()) if df[column_name].min() <", "# print(name1, state, df[column_name].min(), # df[column_name].max()) if df[column_name].min() < 0", "'one_way']) elif df[column_name].min() >= 0 and df[column_name].max() > 0: edges.append([name1,", "def main(args): df = pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df) if __name__", "filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false') g.attr(splines='true') column_names = df.columns states =", "column_name in column_names: if column_name[:6] != 'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0])", "g.view() def generate_react_flow_chart(outputs): df = pd.DataFrame() for key, value in", "TODO: THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS with open('modelflow.gv.json',", "generate_react_flow_chart_from_df(df): column_names = df.columns nodes = {} # Elipses for", "nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box') edges = [] for column_name in", "= parts[0] state = '_'.join(parts[1:])[6:-7] # print(name1, state, df[column_name].min(), #", "'r') as f: return json.load(f) else: g.view() def generate_react_flow_chart(outputs): df", "as pd import argparse import json try: from graphviz import", "!= 'state_': parts = 
column_name.split('_') state = '_'.join(parts[1:])[6:-7] print(parts[0], state,", "= pd.DataFrame() for key, value in outputs['output_states'].items(): df[key] = value['data']", "to generate a graph of', required=True) args = parser.parse_args() main(args)", "[] for column_name in column_names: if column_name[:6] != 'state_': parts", "> 0: g.edge(parts[0], state) else: g.edge(parts[0], state) g.edge(state, parts[0]) if", "df[column_name].min() >= 0 and df[column_name].max() > 0: g.edge(parts[0], state) else:", "= df.columns states = [] g.attr('node', shape='ellipse') for column_name in", "g.attr('node', shape='box') for column_name in column_names: if column_name[:6] != 'state_':", "df = pd.DataFrame() for key, value in outputs['output_states'].items(): df[key] =", "help='The output file to generate a graph of', required=True) args", "'json': # TODO: THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS", "from graphviz import Digraph except: print(\"Note: Optional graphviz not installed\")", "state = '_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(), df[column_name].max()) if df[column_name].min() <", "df[column_name].max() <= 0: g.edge(state, parts[0]) elif df[column_name].min() >= 0 and", "FOR MULTIPLE MODELFLOWS with open('modelflow.gv.json', 'r') as f: return json.load(f)", "parts[0] state = '_'.join(parts[1:])[6:-7] # print(name1, state, df[column_name].min(), # df[column_name].max())", "= argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f', '--output_file', type=str, help='The output file", "and df[column_name].max() > 0: g.edge(parts[0], state) else: g.edge(parts[0], state) g.edge(state,", "= value['data'] return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names = df.columns nodes", "df[column_name].max() <= 0: edges.append([state, name1, 'one_way']) elif df[column_name].min() >= 0", "state = '_'.join(parts[1:])[6:-7] # print(name1, state, df[column_name].min(), # 
df[column_name].max()) if", "for column_name in column_names: if column_name[:6] == 'state_': nodes[column_name[6:]] =", "column_name.split('_') state = '_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(), df[column_name].max()) if df[column_name].min()", "with open('modelflow.gv.json', 'r') as f: return json.load(f) else: g.view() def", "in column_names: if column_name[:6] != 'state_': parts = column_name.split('_') state", "kind='elipse') # Boxes for column_name in column_names: if column_name[:6] !=", "states.append((column_name[6:], column_name)) g.node(column_name[6:]) models = [] g.attr('node', shape='box') for column_name", "'state_': states.append((column_name[6:], column_name)) g.node(column_name[6:]) models = [] g.attr('node', shape='box') for", "g.node(column_name[6:]) models = [] g.attr('node', shape='box') for column_name in column_names:", "generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names = df.columns nodes = {} #", "0: edges.append([name1, state, 'one_way']) else: edges.append([name1, state, 'both']) return dict(nodes=list(nodes.values()),", "parser = argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f', '--output_file', type=str, help='The output", "'--output_file', type=str, help='The output file to generate a graph of',", "= [] g.attr('node', shape='ellipse') for column_name in column_names: if column_name[:6]", "df[column_name].min(), # df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max() <=", "if column_name[:6] != 'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box') edges =", "# generate_graph(df) generate_react_flow_chart_from_df(df) if __name__ == '__main__': parser = argparse.ArgumentParser(description='Generate", "column_name in column_names: if column_name[:6] == 'state_': states.append((column_name[6:], column_name)) g.node(column_name[6:])", "edges.append([name1, state, 
'one_way']) else: edges.append([name1, state, 'both']) return dict(nodes=list(nodes.values()), edges=edges)", "elif df[column_name].min() >= 0 and df[column_name].max() > 0: g.edge(parts[0], state)", "pd.DataFrame() for key, value in outputs['output_states'].items(): df[key] = value['data'] return", "argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f', '--output_file', type=str, help='The output file to", "df[column_name].min() < 0 and df[column_name].max() <= 0: edges.append([state, name1, 'one_way'])", "else: g.edge(parts[0], state) g.edge(state, parts[0]) if graph_format == 'json': #", "DOES NOT WORK FOR MULTIPLE MODELFLOWS with open('modelflow.gv.json', 'r') as", "for key, value in outputs['output_states'].items(): df[key] = value['data'] return generate_react_flow_chart_from_df(df)", "= {} # Elipses for column_name in column_names: if column_name[:6]", "pandas as pd import argparse import json try: from graphviz", "for column_name in column_names: if column_name[:6] != 'state_': parts =", "'state_': models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for column_name in column_names: if column_name[:6]", "state, 'both']) return dict(nodes=list(nodes.values()), edges=edges) def main(args): df = pd.read_csv(args.output_file)", "column_name[:6] != 'state_': parts = column_name.split('_') state = '_'.join(parts[1:])[6:-7] print(parts[0],", "file to generate a graph of', required=True) args = parser.parse_args()", "Graph Viz') parser.add_argument('-f', '--output_file', type=str, help='The output file to generate", "g.attr(overlap='false') g.attr(splines='true') column_names = df.columns states = [] g.attr('node', shape='ellipse')", "key, value in outputs['output_states'].items(): df[key] = value['data'] return generate_react_flow_chart_from_df(df) def", "= [] g.attr('node', shape='box') for column_name in column_names: if column_name[:6]", "WORK FOR MULTIPLE MODELFLOWS with 
open('modelflow.gv.json', 'r') as f: return", "import json try: from graphviz import Digraph except: print(\"Note: Optional", "'_'.join(parts[1:])[6:-7] print(parts[0], state, df[column_name].min(), df[column_name].max()) if df[column_name].min() < 0 and", "pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df) if __name__ == '__main__': parser =", "else: g.view() def generate_react_flow_chart(outputs): df = pd.DataFrame() for key, value", "df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max() <= 0: edges.append([state,", "if df[column_name].min() < 0 and df[column_name].max() <= 0: g.edge(state, parts[0])", "{} # Elipses for column_name in column_names: if column_name[:6] ==", "parts = column_name.split('_') name1 = parts[0] state = '_'.join(parts[1:])[6:-7] #", "argparse import json try: from graphviz import Digraph except: print(\"Note:", "models.append((column_name.split('_')[0], column_name)) g.node(column_name.split('_')[0]) for column_name in column_names: if column_name[:6] !=", "state) g.edge(state, parts[0]) if graph_format == 'json': # TODO: THIS", "== '__main__': parser = argparse.ArgumentParser(description='Generate Graph Viz') parser.add_argument('-f', '--output_file', type=str,", "df[column_name].min() < 0 and df[column_name].max() <= 0: g.edge(state, parts[0]) elif", "for column_name in column_names: if column_name[:6] != 'state_': nodes[column_name.split('_')[0]] =", "'state_': parts = column_name.split('_') name1 = parts[0] state = '_'.join(parts[1:])[6:-7]", "column_name.split('_') name1 = parts[0] state = '_'.join(parts[1:])[6:-7] # print(name1, state,", "json try: from graphviz import Digraph except: print(\"Note: Optional graphviz", "state, df[column_name].min(), df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max() <=", "= dict(name=column_name[6:], kind='elipse') # Boxes for column_name in column_names: if", "Boxes for column_name in column_names: if 
column_name[:6] != 'state_': nodes[column_name.split('_')[0]]", "dict(name=column_name.split('_')[0], kind='box') edges = [] for column_name in column_names: if", "models = [] g.attr('node', shape='box') for column_name in column_names: if", "= dict(name=column_name.split('_')[0], kind='box') edges = [] for column_name in column_names:", "import Digraph except: print(\"Note: Optional graphviz not installed\") def generate_graph(df,", "generate_graph(df, graph_format='pdf'): g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false') g.attr(splines='true')", "g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false') g.attr(splines='true') column_names =", "in column_names: if column_name[:6] == 'state_': nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse')", "try: from graphviz import Digraph except: print(\"Note: Optional graphviz not", "g.attr(splines='true') column_names = df.columns states = [] g.attr('node', shape='ellipse') for", "as f: return json.load(f) else: g.view() def generate_react_flow_chart(outputs): df =", "graph_format == 'json': # TODO: THIS DOES NOT WORK FOR", "outputs['output_states'].items(): df[key] = value['data'] return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names =", "import pandas as pd import argparse import json try: from", "column_name[:6] == 'state_': states.append((column_name[6:], column_name)) g.node(column_name[6:]) models = [] g.attr('node',", "'state_': nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse') # Boxes for column_name in", "graphviz import Digraph except: print(\"Note: Optional graphviz not installed\") def", "[] g.attr('node', shape='box') for column_name in column_names: if column_name[:6] !=", "NOT WORK FOR MULTIPLE MODELFLOWS with open('modelflow.gv.json', 'r') as f:", "# Boxes for column_name in column_names: if column_name[:6] != 
'state_':", "nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse') # Boxes for column_name in column_names:", "Viz') parser.add_argument('-f', '--output_file', type=str, help='The output file to generate a", "g.edge(parts[0], state) else: g.edge(parts[0], state) g.edge(state, parts[0]) if graph_format ==", "g.edge(state, parts[0]) if graph_format == 'json': # TODO: THIS DOES", "Digraph except: print(\"Note: Optional graphviz not installed\") def generate_graph(df, graph_format='pdf'):", "< 0 and df[column_name].max() <= 0: g.edge(state, parts[0]) elif df[column_name].min()", "print(\"Note: Optional graphviz not installed\") def generate_graph(df, graph_format='pdf'): g =", "== 'state_': nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse') # Boxes for column_name", "df = pd.read_csv(args.output_file) # generate_graph(df) generate_react_flow_chart_from_df(df) if __name__ == '__main__':", "kind='box') edges = [] for column_name in column_names: if column_name[:6]", "column_names: if column_name[:6] == 'state_': nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse') #", "df[column_name].min(), df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max() <= 0:", "df.columns nodes = {} # Elipses for column_name in column_names:", "print(parts[0], state, df[column_name].min(), df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max()", "df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max() <= 0: g.edge(state,", "state, df[column_name].min(), # df[column_name].max()) if df[column_name].min() < 0 and df[column_name].max()", "'both']) return dict(nodes=list(nodes.values()), edges=edges) def main(args): df = pd.read_csv(args.output_file) #", "column_name in column_names: if column_name[:6] == 'state_': nodes[column_name[6:]] = dict(name=column_name[6:],", "= Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false') 
g.attr(splines='true') column_names = df.columns", "return dict(nodes=list(nodes.values()), edges=edges) def main(args): df = pd.read_csv(args.output_file) # generate_graph(df)", "if column_name[:6] != 'state_': parts = column_name.split('_') state = '_'.join(parts[1:])[6:-7]", "json.load(f) else: g.view() def generate_react_flow_chart(outputs): df = pd.DataFrame() for key,", "type=str, help='The output file to generate a graph of', required=True)", "column_names = df.columns nodes = {} # Elipses for column_name", "== 'state_': states.append((column_name[6:], column_name)) g.node(column_name[6:]) models = [] g.attr('node', shape='box')", "THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS with open('modelflow.gv.json', 'r')", "Elipses for column_name in column_names: if column_name[:6] == 'state_': nodes[column_name[6:]]", "installed\") def generate_graph(df, graph_format='pdf'): g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format)", "df[column_name].max() > 0: g.edge(parts[0], state) else: g.edge(parts[0], state) g.edge(state, parts[0])", "dict(name=column_name[6:], kind='elipse') # Boxes for column_name in column_names: if column_name[:6]", "and df[column_name].max() > 0: edges.append([name1, state, 'one_way']) else: edges.append([name1, state,", "<= 0: edges.append([state, name1, 'one_way']) elif df[column_name].min() >= 0 and", "0: edges.append([state, name1, 'one_way']) elif df[column_name].min() >= 0 and df[column_name].max()", "open('modelflow.gv.json', 'r') as f: return json.load(f) else: g.view() def generate_react_flow_chart(outputs):", "column_names = df.columns states = [] g.attr('node', shape='ellipse') for column_name", "elif df[column_name].min() >= 0 and df[column_name].max() > 0: edges.append([name1, state,", "'one_way']) else: edges.append([name1, state, 'both']) return dict(nodes=list(nodes.values()), edges=edges) def main(args):", "!= 'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], 
kind='box') edges = [] for", "def generate_graph(df, graph_format='pdf'): g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format) g.attr(overlap='false')", "!= 'state_': parts = column_name.split('_') name1 = parts[0] state =", "parts[0]) elif df[column_name].min() >= 0 and df[column_name].max() > 0: g.edge(parts[0],", "in column_names: if column_name[:6] != 'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box')", "edges = [] for column_name in column_names: if column_name[:6] !=", "else: edges.append([name1, state, 'both']) return dict(nodes=list(nodes.values()), edges=edges) def main(args): df", "[] g.attr('node', shape='ellipse') for column_name in column_names: if column_name[:6] ==", "0: g.edge(parts[0], state) else: g.edge(parts[0], state) g.edge(state, parts[0]) if graph_format", "column_name[:6] != 'state_': nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box') edges = []", "in outputs['output_states'].items(): df[key] = value['data'] return generate_react_flow_chart_from_df(df) def generate_react_flow_chart_from_df(df): column_names", "column_names: if column_name[:6] == 'state_': states.append((column_name[6:], column_name)) g.node(column_name[6:]) models =", "column_name in column_names: if column_name[:6] != 'state_': parts = column_name.split('_')", "g.edge(state, parts[0]) elif df[column_name].min() >= 0 and df[column_name].max() > 0:" ]
[ "os.path.exists( filename ) : old_month -= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break", ": return None #df = pd.DataFrame({ # 'start': [None, None],", "os import json import datetime import pandas as pd from", "break labels.append( m_year + \"/\" + m_month ) fileNameList.append( filename", "base.ColorPrint as CPrint import command.voice_log.Config_Main as CSetting def most_old_Month() :", ": float = (b_time - a_time).total_seconds() #print( \"time : \"", "IDlist , Namelist members_IDlist , members_Namelist = getID(members=members) if members_IDlist", "(list[int]): 役職ID mode (string): ユーザーを示すものは、何か?(UserName or ID) return: pd.DataFrame: 計算済みデータ", "Namelist = [] for member in members : IDlist.append( member.id", ": df_dict[\"time\"][indexNum] += 0.0 else : df_dict[\"time\"][indexNum] += time #", "= json.load(f) except : CPrint.error_print(\"JSONではありません\") import traceback traceback.print_exc() return None", ": data += role_item.members return data async def makeTimeList( client:", "True : filetime = datetime.datetime.today() - relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m')", "= [] for member in members : IDlist.append( member.id )", ": # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime", "is None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum]", "return IDlist , Namelist members_IDlist , members_Namelist = getID(members=members) if", ", Namelist members_IDlist , members_Namelist = getID(members=members) if members_IDlist is", "pprint import base.ColorPrint as CPrint import command.voice_log.Config_Main as CSetting def", "df else : df = df.drop(columns=['name']) all_df = pd.merge(all_df, df", "members_IDlist.index(item[\"member.id\"]) except ValueError as error : # 現在の鯖に、存在しない人は処理しない。 continue if", "DataFrameに変更 df = pd.DataFrame(df_dict, index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除 df =", "1 return old_month , labels , fileNameList async def makeOldTimeList(", "\"/\" + m_month ) fileNameList.append( filename ) old_month += 1", "len(members), 'exit': [None] * len(members), 'time': [0.0] * len(members), }", "try : indexNum = members_IDlist.index(item[\"member.id\"]) except ValueError as error :", "= datetime.datetime.today() - relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y')", ": CPrint.error_print(\"JSONではありません\") import traceback traceback.print_exc() return None if orig_TimeData is", "as error : # 現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"] == \"entry\"", "for guild_item in client.guilds : # ギルドデータ更新 await guild_item.chunk() #", "# ロール制限がなければ、全員分を取ってくる if len(RoleList) == 0 : data += guild_item.members", "None or members_IDlist == [] : return None # JSON取得", "dict try : with open( Datafile_path ) as f: orig_TimeData", "len(members), } # 計算 for item in orig_TimeData : try", "CSetting.JSONPATH_row + m_year + m_month + \".json\" if not os.path.exists(", "= getID(members=members) if members_IDlist is None or members_IDlist == []", "# ギルドデータ更新 await guild_item.chunk() # ロール制限がなければ、全員分を取ってくる if len(RoleList) == 0", "orig_TimeData : try : indexNum = members_IDlist.index(item[\"member.id\"]) except ValueError as", "= 
df.drop(columns=['start','exit']) # 計算 df[\"time\"] = df[\"time\"] / 60 /", "else : df = df.drop(columns=['name']) all_df = pd.merge(all_df, df ,", "CSetting.OneMonthOutput_RoleID ): all_df = None for fileName in MonthFileList :", "traceback.print_exc() return None if orig_TimeData is None : return None", "labels = [] fileNameList = [] while True : filetime", "Datafile_path: str , RoleList: list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。 Args: client", "guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる for role_item in guild_item.roles : if", "members_Namelist = getID(members=members) if members_IDlist is None or members_IDlist ==", "#print( \"time : \" + str(time) ) if time <", "a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S') b_time = datetime.datetime.strptime(", "list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data = [] for guild_item in client.guilds", ": # スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..)", ", fileNameList async def makeOldTimeList( client: discord.Client, MonthFileList:list[str] , IndexLabel:list[str],", "= datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S') time : float =", ": dict try : with open( Datafile_path ) as f:", "if df is None : break labelname = IndexLabel[MonthFileList.index(fileName)] df", "ID) return: pd.DataFrame: 計算済みデータ \"\"\" # ユーザーリスト取得 members = await", "計算 for item in orig_TimeData : try : indexNum =", "RoleList=RoleList) #print( \"test1\" ) pprint( df ) if df is", "\"\"\" # ユーザーリスト取得 members = await UserRoleMember(client, RoleList) # IDだけ抽出", "'time': [13, 23]}, # index=['ONE', 'TWO'] #) df_dict = {", "import command.voice_log.Config_Main as CSetting def most_old_Month() : old_month = 1", "def makeTimeList( client: discord.Client, Datafile_path: str , RoleList: list[int]): \"\"\"", "guild_item.chunk() # ロール制限がなければ、全員分を取ってくる if len(RoleList) == 0 : data +=", "or members_IDlist == [] : return 
None # JSON取得 orig_TimeData", "filetime = datetime.datetime.today() - relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m') m_year =", "def getID(members: list[discord.Member]): IDlist = [] Namelist = [] for", "df_dict[\"time\"][indexNum] += time # DataFrameに変更 df = pd.DataFrame(df_dict, index=members_IDlist )", "= df.drop(columns=['name']) all_df = pd.merge(all_df, df , left_index=True, right_index=True) #all_df", "a_time).total_seconds() #print( \"time : \" + str(time) ) if time", "index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit']) # 計算 df[\"time\"]", "\"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID", "len(RoleList) == 0 : data += guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる", "df_dict[\"start\"][indexNum] = tmp_startTime # -- df_dict[\"exit\"][indexNum] = item[\"time\"] # 差分計算", "item[\"Flag\"] == \"entry\" : df_dict[\"start\"][indexNum] = item[\"time\"] if item[\"Flag\"] ==", "item[\"Flag\"] == \"exit\" : # スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is None", "orig_TimeData is None : return None #df = pd.DataFrame({ #", "member.name + \"#\" + member.discriminator ) return IDlist , Namelist", "MonthFileList.index(fileName) == 0 : all_df = df else : df", "df.drop(columns=['name']) all_df = pd.merge(all_df, df , left_index=True, right_index=True) #all_df =", ": IDlist.append( member.id ) Namelist.append( member.name + \"#\" + member.discriminator", ", RoleList=RoleList) #print( \"test1\" ) pprint( df ) if df", "if df_dict[\"start\"][indexNum] is None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime = datetime.now().strftime(\"%Y/%m/01", "str(time) ) if time < 0.0 : df_dict[\"time\"][indexNum] += 0.0", "= datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S') b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum]", "for role_item in guild_item.roles : if role_item.id in RoleList :", "indexNum = members_IDlist.index(item[\"member.id\"]) except ValueError as error : # 現在の鯖に、存在しない人は処理しない。", "left_index=True, right_index=True) #all_df = pd.merge(all_df, df , left_index=True) #df.loc[:,[labelname]] #pprint(all_df)", "- a_time).total_seconds() #print( \"time : \" + str(time) ) if", "continue if item[\"Flag\"] == \"entry\" : df_dict[\"start\"][indexNum] = item[\"time\"] if", "is None : return None #df = pd.DataFrame({ # 'start':", "RoleList: list[int] = CSetting.OneMonthOutput_RoleID ): all_df = None for fileName", "client: discord.Client, Datafile_path: str , RoleList: list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。", "None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] =", "+ CSetting.JSONPATH_row + m_year + m_month + \".json\" if not", "right_index=True) #all_df = pd.merge(all_df, df , left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return", "makeTimeList( client: discord.Client, Datafile_path: str , RoleList: list[int]): \"\"\" [VC]", "クライアント RoleList (list[int]): 役職ID mode (string): ユーザーを示すものは、何か?(UserName or ID) return:", "= pd.DataFrame(df_dict, index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit']) #", "old_month = 1 labels = [] fileNameList = [] while", "+ m_month ) fileNameList.append( filename ) old_month += 1 return", "+= 0.0 else : df_dict[\"time\"][indexNum] += time # DataFrameに変更 df", "client: discord.Client, RoleList: list[int] ) : \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args:", "df , left_index=True, right_index=True) #all_df = pd.merge(all_df, df , left_index=True)", "= tmp_startTime # -- df_dict[\"exit\"][indexNum] = item[\"time\"] # 差分計算 a_time", "df_dict[\"time\"][indexNum] += 0.0 else : df_dict[\"time\"][indexNum] += time # DataFrameに変更", "[] while True : filetime = datetime.datetime.today() - relativedelta(months=old_month) m_month", "CPrint import command.voice_log.Config_Main as CSetting def most_old_Month() : old_month =", "UserRoleMember( client: discord.Client, RoleList: list[int] ) : \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する", "[VC] 指定ロールに参加しているメンバーを抽出する Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID return:", "data async def makeTimeList( client: discord.Client, Datafile_path: str , RoleList:", "df[\"time\"] = df[\"time\"] / 60 / 60 #pprint(df) return df", "0.0 : df_dict[\"time\"][indexNum] += 0.0 else : df_dict[\"time\"][indexNum] += time", "指定ロールに参加しているメンバーを抽出する Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID return: list[discord.Member]:", ") : old_month -= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year", "\"#\" + 
member.discriminator ) return IDlist , Namelist members_IDlist ,", "for item in orig_TimeData : try : indexNum = members_IDlist.index(item[\"member.id\"])", "old_month += 1 return old_month , labels , fileNameList async", "continue # ロール制限がなければ、該当ロール部を取ってくる for role_item in guild_item.roles : if role_item.id", "as CSetting def most_old_Month() : old_month = 1 labels =", "import pandas as pd from dateutil.relativedelta import relativedelta from pprint", ") pprint( df ) if df is None : break", "await guild_item.chunk() # ロール制限がなければ、全員分を取ってくる if len(RoleList) == 0 : data", "#df.loc[:,[labelname]] #pprint(all_df) return all_df async def UserRoleMember( client: discord.Client, RoleList:", "RoleList: list[int] ) : \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args: client (discord.Client):", ": df_dict[\"start\"][indexNum] = item[\"time\"] if item[\"Flag\"] == \"exit\" : #", "+= role_item.members return data async def makeTimeList( client: discord.Client, Datafile_path:", "+= time # DataFrameに変更 df = pd.DataFrame(df_dict, index=members_IDlist ) #", "'%Y/%m/%d %H:%M:%S') time : float = (b_time - a_time).total_seconds() #print(", "import discord import os import json import datetime import pandas", "'exit': [None] * len(members), 'time': [0.0] * len(members), } #", "== \"exit\" : # スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is None :", "とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime # --", "client.guilds : # ギルドデータ更新 await guild_item.chunk() # ロール制限がなければ、全員分を取ってくる if len(RoleList)", "= datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder + CSetting.JSONPATH_row", "# DataFrameに変更 df = pd.DataFrame(df_dict, index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除 df", ") return IDlist , Namelist members_IDlist , members_Namelist = getID(members=members)", "await UserRoleMember(client, RoleList) # IDだけ抽出 def getID(members: list[discord.Member]): IDlist =", "= CSetting.OneMonthOutput_RoleID ): all_df = None for fileName in MonthFileList", "Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID mode (string): ユーザーを示すものは、何か?(UserName", "Datafile_path ) as f: orig_TimeData = json.load(f) except : CPrint.error_print(\"JSONではありません\")", "for fileName in MonthFileList : df = await makeTimeList( client,", "members_IDlist , members_Namelist = getID(members=members) if members_IDlist is None or", "await makeTimeList( client, Datafile_path=fileName , RoleList=RoleList) #print( \"test1\" ) pprint(", "'TWO'] #) df_dict = { 'name': members_Namelist, 'start': [None] *", "+ member.discriminator ) return IDlist , Namelist members_IDlist , members_Namelist", "* len(members), } # 計算 for item in orig_TimeData :", "None : return None #df = pd.DataFrame({ # 'start': [None,", "else : df_dict[\"time\"][indexNum] += time # DataFrameに変更 df = pd.DataFrame(df_dict,", "item[\"time\"] # 差分計算 a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S')", "old_month -= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year + \"/\"", "client, Datafile_path=fileName , RoleList=RoleList) #print( \"test1\" ) pprint( df )", "pd.merge(all_df, df , left_index=True, right_index=True) #all_df = pd.merge(all_df, df ,", "Namelist.append( member.name + \"#\" + 
member.discriminator ) return IDlist ,", "open( Datafile_path ) as f: orig_TimeData = json.load(f) except :", "計算済みデータ \"\"\" # ユーザーリスト取得 members = await UserRoleMember(client, RoleList) #", "pprint import pprint import base.ColorPrint as CPrint import command.voice_log.Config_Main as", "index=['ONE', 'TWO'] #) df_dict = { 'name': members_Namelist, 'start': [None]", "data += role_item.members return data async def makeTimeList( client: discord.Client,", "tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime # -- df_dict[\"exit\"][indexNum]", "if MonthFileList.index(fileName) == 0 : all_df = df else :", "role_item in guild_item.roles : if role_item.id in RoleList : data", "import pprint import base.ColorPrint as CPrint import command.voice_log.Config_Main as CSetting", "fileNameList.append( filename ) old_month += 1 return old_month , labels", "async def makeTimeList( client: discord.Client, Datafile_path: str , RoleList: list[int]):", "discord.Client, RoleList: list[int] ) : \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args: client", ": with open( Datafile_path ) as f: orig_TimeData = json.load(f)", "while True : filetime = datetime.datetime.today() - relativedelta(months=old_month) m_month =", "old_month , labels , fileNameList async def makeOldTimeList( client: discord.Client,", "df_dict[\"start\"][indexNum] is None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\")", "23]}, # index=['ONE', 'TWO'] #) df_dict = { 'name': members_Namelist,", "IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time': labelname }) if MonthFileList.index(fileName) == 0", "as f: orig_TimeData = json.load(f) except : CPrint.error_print(\"JSONではありません\") import traceback", ": \" + str(time) ) if time < 0.0 :", "item[\"time\"] if item[\"Flag\"] == \"exit\" : # スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum]", "fileNameList async def makeOldTimeList( client: discord.Client, MonthFileList:list[str] , IndexLabel:list[str], RoleList:", "\"\"\" [VC] 生のログデータを計算して、表にして返す。 Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID", "float = (b_time - a_time).total_seconds() #print( \"time : \" +", "return None if orig_TimeData is None : return None #df", "+ m_year + m_month + \".json\" if not os.path.exists( filename", "relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder", "m_month + \".json\" if not os.path.exists( filename ) : old_month", "# 差分計算 a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S') b_time", "with open( Datafile_path ) as f: orig_TimeData = json.load(f) except", "time : float = (b_time - a_time).total_seconds() #print( \"time :", "# ユーザーリスト取得 members = await UserRoleMember(client, RoleList) # IDだけ抽出 def", "if role_item.id in RoleList : data += role_item.members return data", "(discord.Client): クライアント RoleList (list[int]): 役職ID mode (string): ユーザーを示すものは、何か?(UserName or ID)", "m_month = datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder +", "datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + m_year + m_month", "+ m_month + \".json\" if not os.path.exists( filename ) :", "\" + 
str(time) ) if time < 0.0 : df_dict[\"time\"][indexNum]", "fileNameList = [] while True : filetime = datetime.datetime.today() -", "members = await UserRoleMember(client, RoleList) # IDだけ抽出 def getID(members: list[discord.Member]):", ") if df is None : break labelname = IndexLabel[MonthFileList.index(fileName)]", "調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year + \"/\" + m_month ) fileNameList.append(", "= pd.DataFrame({ # 'start': [None, None], # 'end': [None, None],", "return None # JSON取得 orig_TimeData : dict try : with", "= df.rename(columns={'time': labelname }) if MonthFileList.index(fileName) == 0 : all_df", "JSON取得 orig_TimeData : dict try : with open( Datafile_path )", ": try : indexNum = members_IDlist.index(item[\"member.id\"]) except ValueError as error", "return all_df async def UserRoleMember( client: discord.Client, RoleList: list[int] )", "\"entry\" : df_dict[\"start\"][indexNum] = item[\"time\"] if item[\"Flag\"] == \"exit\" :", ": old_month -= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year +", "クライアント RoleList (list[int]): 役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data =", "in members : IDlist.append( member.id ) Namelist.append( member.name + \"#\"", "= item[\"time\"] if item[\"Flag\"] == \"exit\" : # スタートがないのに、エンドがある場合 if", "CSetting.baseLogFolder + CSetting.JSONPATH_row + m_year + m_month + \".json\" if", ") fileNameList.append( filename ) old_month += 1 return old_month ,", ") if time < 0.0 : df_dict[\"time\"][indexNum] += 0.0 else", "m_year = datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + m_year", "df is None : break labelname = IndexLabel[MonthFileList.index(fileName)] df =", "import datetime import pandas as pd from dateutil.relativedelta import relativedelta", "import os import json import datetime import pandas as pd", ", left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return all_df async def UserRoleMember( 
client:", "# ロール制限がなければ、該当ロール部を取ってくる for role_item in guild_item.roles : if role_item.id in", "計算 df[\"time\"] = df[\"time\"] / 60 / 60 #pprint(df) return", "discord import os import json import datetime import pandas as", "from dateutil.relativedelta import relativedelta from pprint import pprint import base.ColorPrint", "pd.DataFrame(df_dict, index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit']) # 計算", "# IDだけ抽出 def getID(members: list[discord.Member]): IDlist = [] Namelist =", "[] : return None # JSON取得 orig_TimeData : dict try", "if members_IDlist is None or members_IDlist == [] : return", "IDlist = [] Namelist = [] for member in members", "= datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime # -- df_dict[\"exit\"][indexNum] =", "df.drop(columns=['start','exit']) # 計算 df[\"time\"] = df[\"time\"] / 60 / 60", ", '%Y/%m/%d %H:%M:%S') b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S')", "0 : all_df = df else : df = df.drop(columns=['name'])", "orig_TimeData : dict try : with open( Datafile_path ) as", "discord.Client, Datafile_path: str , RoleList: list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。 Args:", "if len(RoleList) == 0 : data += guild_item.members continue #", "in orig_TimeData : try : indexNum = members_IDlist.index(item[\"member.id\"]) except ValueError", "[VC] 生のログデータを計算して、表にして返す。 Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID mode", ") old_month += 1 return old_month , labels , fileNameList", "[] for guild_item in client.guilds : # ギルドデータ更新 await guild_item.chunk()", "# 計算 for item in orig_TimeData : try : indexNum", "1 labels = [] fileNameList = [] while True :", "(list[int]): 役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data = [] for", ": # ギルドデータ更新 await guild_item.chunk() # ロール制限がなければ、全員分を取ってくる if len(RoleList) ==", "# 作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit']) # 計算 df[\"time\"] = 
df[\"time\"]", ", RoleList: list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。 Args: client (discord.Client): クライアント", "as pd from dateutil.relativedelta import relativedelta from pprint import pprint", "async def UserRoleMember( client: discord.Client, RoleList: list[int] ) : \"\"\"", "df = df.drop(columns=['start','exit']) # 計算 df[\"time\"] = df[\"time\"] / 60", "スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) tmp_startTime =", ", left_index=True, right_index=True) #all_df = pd.merge(all_df, df , left_index=True) #df.loc[:,[labelname]]", "MonthFileList:list[str] , IndexLabel:list[str], RoleList: list[int] = CSetting.OneMonthOutput_RoleID ): all_df =", "IDだけ抽出 def getID(members: list[discord.Member]): IDlist = [] Namelist = []", "(b_time - a_time).total_seconds() #print( \"time : \" + str(time) )", "df ) if df is None : break labelname =", "'time': [0.0] * len(members), } # 計算 for item in", "filename ) old_month += 1 return old_month , labels ,", "labels.append( m_year + \"/\" + m_month ) fileNameList.append( filename )", "RoleList (list[int]): 役職ID mode (string): ユーザーを示すものは、何か?(UserName or ID) return: pd.DataFrame:", "guild_item in client.guilds : # ギルドデータ更新 await guild_item.chunk() # ロール制限がなければ、全員分を取ってくる", ": data += guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる for role_item in", "RoleList : data += role_item.members return data async def makeTimeList(", "Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー", "df = await makeTimeList( client, Datafile_path=fileName , RoleList=RoleList) #print( \"test1\"", "None if orig_TimeData is None : return None #df =", "all_df = pd.merge(all_df, df , left_index=True, right_index=True) #all_df = pd.merge(all_df,", "\"test1\" ) pprint( df ) if df is None :", "'%Y/%m/%d %H:%M:%S') b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S') time", "df_dict[\"exit\"][indexNum] = 
item[\"time\"] # 差分計算 a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum] ,", "< 0.0 : df_dict[\"time\"][indexNum] += 0.0 else : df_dict[\"time\"][indexNum] +=", "'start': [None, None], # 'end': [None, None], # 'time': [13,", "指定ロールに参加しているメンバー \"\"\" data = [] for guild_item in client.guilds :", "[None, None], # 'time': [13, 23]}, # index=['ONE', 'TWO'] #)", "[13, 23]}, # index=['ONE', 'TWO'] #) df_dict = { 'name':", "if time < 0.0 : df_dict[\"time\"][indexNum] += 0.0 else :", "ロール制限がなければ、該当ロール部を取ってくる for role_item in guild_item.roles : if role_item.id in RoleList", "relativedelta from pprint import pprint import base.ColorPrint as CPrint import", "'name': members_Namelist, 'start': [None] * len(members), 'exit': [None] * len(members),", ": indexNum = members_IDlist.index(item[\"member.id\"]) except ValueError as error : #", "return: pd.DataFrame: 計算済みデータ \"\"\" # ユーザーリスト取得 members = await UserRoleMember(client,", "all_df = df else : df = df.drop(columns=['name']) all_df =", "m_year + \"/\" + m_month ) fileNameList.append( filename ) old_month", "df = df.rename(columns={'time': labelname }) if MonthFileList.index(fileName) == 0 :", "import base.ColorPrint as CPrint import command.voice_log.Config_Main as CSetting def most_old_Month()", "b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S') time : float", "df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S') time : float = (b_time -", "= await UserRoleMember(client, RoleList) # IDだけ抽出 def getID(members: list[discord.Member]): IDlist", "most_old_Month() : old_month = 1 labels = [] fileNameList =", "== \"entry\" : df_dict[\"start\"][indexNum] = item[\"time\"] if item[\"Flag\"] == \"exit\"", "datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder + CSetting.JSONPATH_row +", "[] fileNameList = [] while True : filetime = datetime.datetime.today()", "makeOldTimeList( client: discord.Client, 
MonthFileList:list[str] , IndexLabel:list[str], RoleList: list[int] = CSetting.OneMonthOutput_RoleID", "break labelname = IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time': labelname }) if", "= await makeTimeList( client, Datafile_path=fileName , RoleList=RoleList) #print( \"test1\" )", "# JSON取得 orig_TimeData : dict try : with open( Datafile_path", "pd from dateutil.relativedelta import relativedelta from pprint import pprint import", ": df = df.drop(columns=['name']) all_df = pd.merge(all_df, df , left_index=True,", ": return None # JSON取得 orig_TimeData : dict try :", "client (discord.Client): クライアント RoleList (list[int]): 役職ID mode (string): ユーザーを示すものは、何か?(UserName or", "fileName in MonthFileList : df = await makeTimeList( client, Datafile_path=fileName", "members_Namelist, 'start': [None] * len(members), 'exit': [None] * len(members), 'time':", "all_df async def UserRoleMember( client: discord.Client, RoleList: list[int] ) :", "%H:%M:%S') b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S') time :", ") # 作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit']) # 計算 df[\"time\"] =", "member.discriminator ) return IDlist , Namelist members_IDlist , members_Namelist =", "labels , fileNameList async def makeOldTimeList( client: discord.Client, MonthFileList:list[str] ,", "not os.path.exists( filename ) : old_month -= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。", "members : IDlist.append( member.id ) Namelist.append( member.name + \"#\" +", "+ \"/\" + m_month ) fileNameList.append( filename ) old_month +=", "role_item.id in RoleList : data += role_item.members return data async", "if item[\"Flag\"] == \"exit\" : # スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is", "None #df = pd.DataFrame({ # 'start': [None, None], # 'end':", "pd.DataFrame({ # 'start': [None, None], # 'end': [None, None], #", "member.id ) Namelist.append( member.name + \"#\" + member.discriminator ) return", 
"ユーザーを示すものは、何か?(UserName or ID) return: pd.DataFrame: 計算済みデータ \"\"\" # ユーザーリスト取得 members", ") Namelist.append( member.name + \"#\" + member.discriminator ) return IDlist", "{ 'name': members_Namelist, 'start': [None] * len(members), 'exit': [None] *", "[None] * len(members), 'time': [0.0] * len(members), } # 計算", "list[int] = CSetting.OneMonthOutput_RoleID ): all_df = None for fileName in", "[] for member in members : IDlist.append( member.id ) Namelist.append(", "client (discord.Client): クライアント RoleList (list[int]): 役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\"", ": \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args: client (discord.Client): クライアント RoleList (list[int]):", "#pprint(all_df) return all_df async def UserRoleMember( client: discord.Client, RoleList: list[int]", "[0.0] * len(members), } # 計算 for item in orig_TimeData", "return old_month , labels , fileNameList async def makeOldTimeList( client:", "time # DataFrameに変更 df = pd.DataFrame(df_dict, index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除", "IndexLabel:list[str], RoleList: list[int] = CSetting.OneMonthOutput_RoleID ): all_df = None for", "* len(members), 'time': [0.0] * len(members), } # 計算 for", "= IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time': labelname }) if MonthFileList.index(fileName) ==", "command.voice_log.Config_Main as CSetting def most_old_Month() : old_month = 1 labels", "datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S') b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum] ,", "= (b_time - a_time).total_seconds() #print( \"time : \" + str(time)", "役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data = [] for guild_item", "df_dict[\"start\"][indexNum] = item[\"time\"] if item[\"Flag\"] == \"exit\" : # スタートがないのに、エンドがある場合", "is None or members_IDlist == [] : return None #", "- relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y') 
filename =", "client: discord.Client, MonthFileList:list[str] , IndexLabel:list[str], RoleList: list[int] = CSetting.OneMonthOutput_RoleID ):", "生のログデータを計算して、表にして返す。 Args: client (discord.Client): クライアント RoleList (list[int]): 役職ID mode (string):", "): all_df = None for fileName in MonthFileList : df", "#df = pd.DataFrame({ # 'start': [None, None], # 'end': [None,", "mode (string): ユーザーを示すものは、何か?(UserName or ID) return: pd.DataFrame: 計算済みデータ \"\"\" #", "差分計算 a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S') b_time =", "ロール制限がなければ、全員分を取ってくる if len(RoleList) == 0 : data += guild_item.members continue", "df.rename(columns={'time': labelname }) if MonthFileList.index(fileName) == 0 : all_df =", "try : with open( Datafile_path ) as f: orig_TimeData =", ", labels , fileNameList async def makeOldTimeList( client: discord.Client, MonthFileList:list[str]", "# 現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"] == \"entry\" : df_dict[\"start\"][indexNum] =", "Namelist members_IDlist , members_Namelist = getID(members=members) if members_IDlist is None", "error : # 現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"] == \"entry\" :", "+ \"#\" + member.discriminator ) return IDlist , Namelist members_IDlist", "orig_TimeData = json.load(f) except : CPrint.error_print(\"JSONではありません\") import traceback traceback.print_exc() return", "None for fileName in MonthFileList : df = await makeTimeList(", "in RoleList : data += role_item.members return data async def", ", '%Y/%m/%d %H:%M:%S') time : float = (b_time - a_time).total_seconds()", "+ \".json\" if not os.path.exists( filename ) : old_month -=", "df = pd.DataFrame(df_dict, index=members_IDlist ) # 作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit'])", "data += guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる for role_item in guild_item.roles", "= { 'name': members_Namelist, 'start': [None] * len(members), 'exit': [None]", "datetime import pandas as pd from dateutil.relativedelta import 
relativedelta from", "df = df.drop(columns=['name']) all_df = pd.merge(all_df, df , left_index=True, right_index=True)", "RoleList: list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。 Args: client (discord.Client): クライアント RoleList", "None # JSON取得 orig_TimeData : dict try : with open(", "-- df_dict[\"exit\"][indexNum] = item[\"time\"] # 差分計算 a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum]", "\"\"\" data = [] for guild_item in client.guilds : #", "def UserRoleMember( client: discord.Client, RoleList: list[int] ) : \"\"\" [VC]", "for member in members : IDlist.append( member.id ) Namelist.append( member.name", "+ str(time) ) if time < 0.0 : df_dict[\"time\"][indexNum] +=", "async def makeOldTimeList( client: discord.Client, MonthFileList:list[str] , IndexLabel:list[str], RoleList: list[int]", ") as f: orig_TimeData = json.load(f) except : CPrint.error_print(\"JSONではありません\") import", "is None : break labelname = IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time':", "if item[\"Flag\"] == \"entry\" : df_dict[\"start\"][indexNum] = item[\"time\"] if item[\"Flag\"]", "member in members : IDlist.append( member.id ) Namelist.append( member.name +", "1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year + \"/\" + m_month", "return None #df = pd.DataFrame({ # 'start': [None, None], #", "filename ) : old_month -= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append(", "df , left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return all_df async def UserRoleMember(", "members_IDlist is None or members_IDlist == [] : return None", "ユーザーリスト取得 members = await UserRoleMember(client, RoleList) # IDだけ抽出 def getID(members:", "00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime # -- df_dict[\"exit\"][indexNum] = item[\"time\"] #", "} # 計算 for item in orig_TimeData : try :", "or ID) return: pd.DataFrame: 計算済みデータ \"\"\" # ユーザーリスト取得 members =", "ギルドデータ更新 await guild_item.chunk() # ロール制限がなければ、全員分を取ってくる if 
len(RoleList) == 0 :", "ValueError as error : # 現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"] ==", "traceback traceback.print_exc() return None if orig_TimeData is None : return", "df_dict[\"start\"][indexNum] , '%Y/%m/%d %H:%M:%S') b_time = datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d", "dateutil.relativedelta import relativedelta from pprint import pprint import base.ColorPrint as", "== [] : return None # JSON取得 orig_TimeData : dict", "= item[\"time\"] # 差分計算 a_time = datetime.datetime.strptime( df_dict[\"start\"][indexNum] , '%Y/%m/%d", "= 1 labels = [] fileNameList = [] while True", "+= guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる for role_item in guild_item.roles :", "list[int] ) : \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args: client (discord.Client): クライアント", "None], # 'end': [None, None], # 'time': [13, 23]}, #", "[] Namelist = [] for member in members : IDlist.append(", "= [] while True : filetime = datetime.datetime.today() - relativedelta(months=old_month)", "[None] * len(members), 'exit': [None] * len(members), 'time': [0.0] *", "if not os.path.exists( filename ) : old_month -= 1 #", ": old_month = 1 labels = [] fileNameList = []", "len(members), 'time': [0.0] * len(members), } # 計算 for item", "MonthFileList : df = await makeTimeList( client, Datafile_path=fileName , RoleList=RoleList)", "Datafile_path=fileName , RoleList=RoleList) #print( \"test1\" ) pprint( df ) if", "0.0 else : df_dict[\"time\"][indexNum] += time # DataFrameに変更 df =", "as CPrint import command.voice_log.Config_Main as CSetting def most_old_Month() : old_month", "\"time : \" + str(time) ) if time < 0.0", "= pd.merge(all_df, df , left_index=True, right_index=True) #all_df = pd.merge(all_df, df", "filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + m_year + m_month +", "except : CPrint.error_print(\"JSONではありません\") import traceback traceback.print_exc() return None if orig_TimeData", "pd.DataFrame: 計算済みデータ \"\"\" # ユーザーリスト取得 members = await 
UserRoleMember(client, RoleList)", "pandas as pd from dateutil.relativedelta import relativedelta from pprint import", ": filetime = datetime.datetime.today() - relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m') m_year", "except ValueError as error : # 現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"]", "datetime.datetime.today() - relativedelta(months=old_month) m_month = datetime.datetime.strftime(filetime,'%m') m_year = datetime.datetime.strftime(filetime,'%Y') filename", "m_year + m_month + \".json\" if not os.path.exists( filename )", "datetime.datetime.strptime( df_dict[\"exit\"][indexNum] , '%Y/%m/%d %H:%M:%S') time : float = (b_time", "'end': [None, None], # 'time': [13, 23]}, # index=['ONE', 'TWO']", "RoleList) # IDだけ抽出 def getID(members: list[discord.Member]): IDlist = [] Namelist", "# -- df_dict[\"exit\"][indexNum] = item[\"time\"] # 差分計算 a_time = datetime.datetime.strptime(", "tmp_startTime # -- df_dict[\"exit\"][indexNum] = item[\"time\"] # 差分計算 a_time =", "None : break labelname = IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time': labelname", "= None for fileName in MonthFileList : df = await", "in guild_item.roles : if role_item.id in RoleList : data +=", "list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。 Args: client (discord.Client): クライアント RoleList (list[int]):", "if orig_TimeData is None : return None #df = pd.DataFrame({", ", IndexLabel:list[str], RoleList: list[int] = CSetting.OneMonthOutput_RoleID ): all_df = None", "IDlist.append( member.id ) Namelist.append( member.name + \"#\" + member.discriminator )", "== 0 : all_df = df else : df =", "# 'end': [None, None], # 'time': [13, 23]}, # index=['ONE',", "= [] for guild_item in client.guilds : # ギルドデータ更新 await", ": # 現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"] == \"entry\" : df_dict[\"start\"][indexNum]", "0 : data += guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる for role_item", "in client.guilds : # ギルドデータ更新 await guild_item.chunk() # 
ロール制限がなければ、全員分を取ってくる if", "getID(members: list[discord.Member]): IDlist = [] Namelist = [] for member", "'start': [None] * len(members), 'exit': [None] * len(members), 'time': [0.0]", "in MonthFileList : df = await makeTimeList( client, Datafile_path=fileName ,", "UserRoleMember(client, RoleList) # IDだけ抽出 def getID(members: list[discord.Member]): IDlist = []", "* len(members), 'exit': [None] * len(members), 'time': [0.0] * len(members),", ": df = await makeTimeList( client, Datafile_path=fileName , RoleList=RoleList) #print(", "+= 1 return old_month , labels , fileNameList async def", "役職ID mode (string): ユーザーを示すものは、何か?(UserName or ID) return: pd.DataFrame: 計算済みデータ \"\"\"", "labelname }) if MonthFileList.index(fileName) == 0 : all_df = df", ": if role_item.id in RoleList : data += role_item.members return", ": df_dict[\"time\"][indexNum] += time # DataFrameに変更 df = pd.DataFrame(df_dict, index=members_IDlist", "= [] fileNameList = [] while True : filetime =", "\".json\" if not os.path.exists( filename ) : old_month -= 1", "import traceback traceback.print_exc() return None if orig_TimeData is None :", "def makeOldTimeList( client: discord.Client, MonthFileList:list[str] , IndexLabel:list[str], RoleList: list[int] =", "= [] Namelist = [] for member in members :", "pprint( df ) if df is None : break labelname", "# index=['ONE', 'TWO'] #) df_dict = { 'name': members_Namelist, 'start':", "list[discord.Member]): IDlist = [] Namelist = [] for member in", "-= 1 # 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year + \"/\" +", "df_dict = { 'name': members_Namelist, 'start': [None] * len(members), 'exit':", "import relativedelta from pprint import pprint import base.ColorPrint as CPrint", "all_df = None for fileName in MonthFileList : df =", "json.load(f) except : CPrint.error_print(\"JSONではありません\") import traceback traceback.print_exc() return None if", "def most_old_Month() : old_month = 1 labels = [] fileNameList", "# 'start': [None, None], # 'end': 
[None, None], # 'time':", "\"exit\" : # スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is None : #", "role_item.members return data async def makeTimeList( client: discord.Client, Datafile_path: str", "#all_df = pd.merge(all_df, df , left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return all_df", "time < 0.0 : df_dict[\"time\"][indexNum] += 0.0 else : df_dict[\"time\"][indexNum]", "datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime # -- df_dict[\"exit\"][indexNum] = item[\"time\"]", "item in orig_TimeData : try : indexNum = members_IDlist.index(item[\"member.id\"]) except", "%H:%M:%S') time : float = (b_time - a_time).total_seconds() #print( \"time", "return data async def makeTimeList( client: discord.Client, Datafile_path: str ,", "from pprint import pprint import base.ColorPrint as CPrint import command.voice_log.Config_Main", "data = [] for guild_item in client.guilds : # ギルドデータ更新", "json import datetime import pandas as pd from dateutil.relativedelta import", "discord.Client, MonthFileList:list[str] , IndexLabel:list[str], RoleList: list[int] = CSetting.OneMonthOutput_RoleID ): all_df", ") : \"\"\" [VC] 指定ロールに参加しているメンバーを抽出する Args: client (discord.Client): クライアント RoleList", "# とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime = datetime.now().strftime(\"%Y/%m/01 00:00:00\") df_dict[\"start\"][indexNum] = tmp_startTime #", "= CSetting.baseLogFolder + CSetting.JSONPATH_row + m_year + m_month + \".json\"", "import json import datetime import pandas as pd from dateutil.relativedelta", "pd.merge(all_df, df , left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return all_df async def", "f: orig_TimeData = json.load(f) except : CPrint.error_print(\"JSONではありません\") import traceback traceback.print_exc()", "[None, None], # 'end': [None, None], # 'time': [13, 23]},", "# 'time': [13, 23]}, # index=['ONE', 'TWO'] #) df_dict =", "m_month ) fileNameList.append( filename ) old_month += 1 return old_month", "#print( \"test1\" ) pprint( df ) if df is None", "# 計算 df[\"time\"] = df[\"time\"] / 60 / 60 #pprint(df)", "(discord.Client): クライアント RoleList (list[int]): 役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data", "= df else : df = df.drop(columns=['name']) all_df = pd.merge(all_df,", "RoleList (list[int]): 役職ID return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data = []", "str , RoleList: list[int]): \"\"\" [VC] 生のログデータを計算して、表にして返す。 Args: client (discord.Client):", "# スタートがないのに、エンドがある場合 if df_dict[\"start\"][indexNum] is None : # とりあえず、月初めに入室した扱いにする(他の方法も検討中。そもそも入室してない扱いetc..) 
tmp_startTime", "}) if MonthFileList.index(fileName) == 0 : all_df = df else", ", members_Namelist = getID(members=members) if members_IDlist is None or members_IDlist", "作業用の\"start\"と\"end\"を削除 df = df.drop(columns=['start','exit']) # 計算 df[\"time\"] = df[\"time\"] /", ": break labelname = IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time': labelname })", "CSetting def most_old_Month() : old_month = 1 labels = []", "#) df_dict = { 'name': members_Namelist, 'start': [None] * len(members),", "現在の鯖に、存在しない人は処理しない。 continue if item[\"Flag\"] == \"entry\" : df_dict[\"start\"][indexNum] = item[\"time\"]", "= datetime.datetime.strftime(filetime,'%Y') filename = CSetting.baseLogFolder + CSetting.JSONPATH_row + m_year +", "labelname = IndexLabel[MonthFileList.index(fileName)] df = df.rename(columns={'time': labelname }) if MonthFileList.index(fileName)", "# 調査用に+1してあるので、実際の値は、これにold_monthに-1したものとなる。 break labels.append( m_year + \"/\" + m_month )", "members_IDlist == [] : return None # JSON取得 orig_TimeData :", "== 0 : data += guild_item.members continue # ロール制限がなければ、該当ロール部を取ってくる for", "(string): ユーザーを示すものは、何か?(UserName or ID) return: pd.DataFrame: 計算済みデータ \"\"\" # ユーザーリスト取得", ": all_df = df else : df = df.drop(columns=['name']) all_df", "= pd.merge(all_df, df , left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return all_df async", "= members_IDlist.index(item[\"member.id\"]) except ValueError as error : # 現在の鯖に、存在しない人は処理しない。 continue", "getID(members=members) if members_IDlist is None or members_IDlist == [] :", "makeTimeList( client, Datafile_path=fileName , RoleList=RoleList) #print( \"test1\" ) pprint( df", "return: list[discord.Member]: 指定ロールに参加しているメンバー \"\"\" data = [] for guild_item in", "None], # 'time': [13, 23]}, # index=['ONE', 'TWO'] #) df_dict", "left_index=True) #df.loc[:,[labelname]] #pprint(all_df) return all_df async def UserRoleMember( client: discord.Client,", "CPrint.error_print(\"JSONではありません\") import traceback 
traceback.print_exc() return None if orig_TimeData is None", "guild_item.roles : if role_item.id in RoleList : data += role_item.members" ]
[ "n, m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n])) * m return calc(0, 256", "(int(end.split(\".\")[n]) - int(start.split(\".\")[n])) * m return calc(0, 256 * 256", "- int(start.split(\".\")[n])) * m return calc(0, 256 * 256 *", "m return calc(0, 256 * 256 * 256) + calc(1,", "256 * 256 * 256) + calc(1, 256 * 256)", "256) + calc(1, 256 * 256) + calc(2, 256) +", "= lambda n, m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n])) * m return", "return calc(0, 256 * 256 * 256) + calc(1, 256", "256 * 256) + calc(1, 256 * 256) + calc(2,", "calc(0, 256 * 256 * 256) + calc(1, 256 *", "def ips_between(start, end): calc = lambda n, m: (int(end.split(\".\")[n]) -", "calc = lambda n, m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n])) * m", "ips_between(start, end): calc = lambda n, m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n]))", "* 256) + calc(1, 256 * 256) + calc(2, 256)", "calc(1, 256 * 256) + calc(2, 256) + calc(3, 1)", "end): calc = lambda n, m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n])) *", "m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n])) * m return calc(0, 256 *", "lambda n, m: (int(end.split(\".\")[n]) - int(start.split(\".\")[n])) * m return calc(0,", "* 256 * 256) + calc(1, 256 * 256) +", "* m return calc(0, 256 * 256 * 256) +", "int(start.split(\".\")[n])) * m return calc(0, 256 * 256 * 256)", "+ calc(1, 256 * 256) + calc(2, 256) + calc(3," ]
[ "a bug in adm feature passing scale into dwt_quant_step #", "= cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key =", "val = None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1 continue len_score =", "assert len_score != 0 for atom_feature in self.ATOM_FEATURES[1:]: assert len_score", "ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2',", "= ['psnr'] def _generate_result(self, asset): # routine to call the", "convolution; update adm features by folding noise floor into per", "cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) ) # vif_scalei = vif_num_scalei", "/ np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) )", "feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] =", "+ (vif_num_scale3 / vif_den_scale3)) / 4.0 vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key]", "2 # dis1st, dis2nd feature_result = {} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:,", "= int(mo.group(1)) assert cur_idx == atom_feature_idx_dict[atom_feature] # parse value, allowing", "\"MS_SSIM_feature\" # VERSION = \"1.0\" VERSION = \"1.1\" # fix", "'0.2' # expose vif_num, vif_den, adm_num, adm_den, anpsnr # VERSION", "['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 
'ms_ssim_c_scale2', 'ms_ssim_s_scale2',", "For an example, follow VmafFeatureExtractor. \"\"\" __metaclass__ = ABCMeta @property", "combination (by the TYPE and VERSION attribute), so that the", "# VERSION = '0.2.4b' # Modify by adding ADM noise", "as ref_yuv_reader: scores_mtx_list = [] i = 0 for ref_yuv", "= \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate for feature in", "open(log_file_path, 'rt') as log_file: for line in log_file.readlines(): for atom_feature", "case # VERSION = '0.2.2b' # expose adm_den/num_scalex # VERSION", "from vmaf.core.result import Result from vmaf.tools.reader import YuvReader class FeatureExtractor(Executor):", "code ATOM_FEATURES = ['vif', 'adm', 'ansnr', 'motion', 'motion2', 'vif_num', 'vif_den',", "== len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data possibly corrupt. Run cleanup script", "= cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var =", "!= 0 for atom_feature in self.ATOM_FEATURES[1:]: assert len_score == len(atom_feature_scores_dict[atom_feature]),", ") # vif2 = # ((vif_num_scale0 / vif_den_scale0) + (vif_num_scale1", "import Executor from vmaf.core.result import Result from vmaf.tools.reader import YuvReader", "yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path,", "dis_yuv_reader: scores_mtx_list = [] i = 0 for dis_yuv in", "asset): result = {} result.update(self._get_feature_scores(asset)) executor_id = self.executor_id return Result(asset,", "# adm abs-->fabs, corrected border handling, uniform reading with option", "with option of offset for input YUV, updated VIF corner", "'adm_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0 def _generate_result(self,", 
"cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var = lambda m: m[1] -", "/ np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0 )", "3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1')", "TYPE = \"Moment_feature\" # VERSION = \"1.0\" # call executable", "vmaf import ExternalProgramCaller, to_list from vmaf.config import VmafConfig, VmafExternalConfig from", "ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key", "cls)._post_process_result(result) # vifdiff_scalei = vifdiff_num_scalei / vifdiff_den_scalei, i = 0,", "identified. A derived class of FeatureExtractor must: 1) Override TYPE", "ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path,", "list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) /", "/ (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key]) +", "root; add derived feature motion2 VERSION = '0.2.4c' # Modify", "from the log file, and return the scores in a", "adm_den_scalei, i = 0, 1, 2, 3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0')", "'adm_den_scale3', ] DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2', 'adm2',", 
"((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) +", "return result class PsnrFeatureExtractor(FeatureExtractor): TYPE = \"PSNR_feature\" VERSION = \"1.0\"", "in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class VifFrameDifferenceFeatureExtractor(FeatureExtractor):", "calculate refvar and disvar from ref1st, ref2nd, dis1st, dis2nd refvar_scores_key", "'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0 def", "vmaf.core.result import Result from vmaf.tools.reader import YuvReader class FeatureExtractor(Executor): \"\"\"", "# python only ATOM_FEATURES = ['ref1st', 'ref2nd', 'dis1st', 'dis2nd', ]", "ref_path, dis_path, w, h, log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor): TYPE =", "'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4',", "floor outside cube root; add derived feature motion2 VERSION =", "atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1 continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score", "= dis_y.mean() secondm = dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i", "# + ((adm_num_scale1 + ADM_SCALE_CONSTANT) / (adm_den_scale1 + ADM_SCALE_CONSTANT)) #", "scale into dwt_quant_step # VERSION = '0.2.4b' # Modify by", "(adm_num + ADM2_CONSTANT) / (adm_den + ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2')", "format. 
log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'rt') as log_file: log_str", "\"1.0\" # call executable VERSION = \"1.1\" # python only", "ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) @classmethod def _post_process_result(cls,", "= self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) @classmethod", "= cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key =", "'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES", "= cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key =", "atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod def get_score_key(cls, atom_feature): return", "= 0 with open(log_file_path, 'rt') as log_file: for line in", "the log file, and return the scores in a dictionary", "['vif', 'adm', 'ansnr', 'motion', 'motion2', 'vif_num', 'vif_den', 'adm_num', 'adm_den', 'anpsnr',", "vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key", "i = 0, 1, 2, 3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key", "return result class SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\" # VERSION =", "(vif_num_scale2 / vif_den_scale2) + (vif_num_scale3 / vif_den_scale3)) / 4.0 vif_scores_key", "+ 
cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] = list(", "/ vif_den_scale3)) / 4.0 vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list(", "feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] =", "(vif_num_scale1 / vif_den_scale1) + # (vif_num_scale2 / vif_den_scale2) + (vif_num_scale3", "a unique type and version combination (by the TYPE and", "vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key]", "dictionary format. log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'rt') as log_file:", "log file. 3) Override _get_feature_scores(self, asset), which read the feature", "be identified. 
A derived class of FeatureExtractor must: 1) Override", "cur_idx = int(mo.group(1)) assert cur_idx == atom_feature_idx_dict[atom_feature] # parse value,", "(np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)", "== 2 # dis1st, dis2nd feature_result = {} feature_result[self.get_scores_key('ref1st')] =", "vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key", "'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def _generate_result(self, asset):", "= cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key =", "= log_file.read() log_dict = ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx =", "result = super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2 = # (adm_num +", "VmafFeatureExtractor. \"\"\" __metaclass__ = ABCMeta @property @abstractmethod def ATOM_FEATURES(self): raise", "zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate", "assert len_score == len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data possibly corrupt. 
Run", "'adm2', 'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT = 0", "adm_den/num_scalex # VERSION = '0.2.3' # AVX for VMAF convolution;", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key])", "generate feature scores in a log file. 3) Override _get_feature_scores(self,", "generate feature # scores in the log file. quality_width, quality_height", "# scores in the log file. quality_w, quality_h = asset.quality_width_height", "from vmaf.tools.stats import ListStats __copyright__ = \"Copyright 2016-2018, Netflix, Inc.\"", "log file. quality_width, quality_height = asset.quality_width_height log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset)", "ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _, num_ref_features =", "def _post_process_result(cls, result): # override Executor._post_process_result result = super(MomentFeatureExtractor, cls)._post_process_result(result)", "VIF fix # VERSION = '0.2' # expose vif_num, vif_den,", "vmaf.config import VmafConfig, VmafExternalConfig from vmaf.core.executor import Executor from vmaf.core.result", "self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type,", "log_file.write(str(log_dict)) def _get_feature_scores(self, asset): # routine to read the feature", "python only ATOM_FEATURES = ['ref1st', 'ref2nd', 'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES", "of corresponding results. 
A FeatureExtractor must specify a unique type", "specify a unique type and version combination (by the TYPE", "from vmaf.tools.misc import make_absolute_path, run_process from vmaf.tools.stats import ListStats __copyright__", "'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ] DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3',", "] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0 def _generate_result(self, asset):", "cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd')", "abs-->fabs, corrected border handling, uniform reading with option of offset", "import os from vmaf.tools.misc import make_absolute_path, run_process from vmaf.tools.stats import", "quality_h = asset.quality_width_height ref_scores_mtx = None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h,", "cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) )", "open(log_file_path, 'wt') as log_file: log_file.write(str(log_dict)) def _get_feature_scores(self, asset): # routine", "(((adm_num_scale0 + ADM_SCALE_CONSTANT) / (adm_den_scale0 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale1", "parse value, allowing NaN and inf val = float(mo.group(2)) if", "'adm_scale1', 'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0", "= self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) class", "= cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key =", "vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list( 
(np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) )", "0 ATOM_FEATURES = ['ssim', 'ssim_l', 'ssim_c', 'ssim_s'] def _generate_result(self, asset):", "np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0 ) #", "try again.\" feature_result = {} for atom_feature in self.ATOM_FEATURES: scores_key", "result.result_dict[vifdiff_scale0_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] = list(", "atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self, asset): # routine", "= cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key =", "'0.2.2b' # expose adm_den/num_scalex # VERSION = '0.2.3' # AVX", "a dictionary format. 
log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'rt') as", "for VMAF convolution; update adm features by folding noise floor", "'vifdiff_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0 def _generate_result(self,", "to read the feature scores from the log file, and", "= self._get_log_file_path(asset) atom_feature_scores_dict = {} atom_feature_idx_dict = {} for atom_feature", "command-line executable and generate quality # scores in the log", "= 0, 1, 2, 3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key =", "i += 1 ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx = None with", "asset): # routine to read the feature scores from the", "to call the command-line executable and generate feature # scores", "= '0.2.4c' # Modify by moving motion2 to c code", "cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0') adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2')", "mo: cur_idx = int(mo.group(1)) assert cur_idx == atom_feature_idx_dict[atom_feature] # parse", "list of corresponding results. A FeatureExtractor must specify a unique", "= vif_num_scalei / vif_den_scalei, i = 0, 1, 2, 3", "# the scores in a dictionary format. 
log_file_path = self._get_log_file_path(asset)", "TYPE = \"MS_SSIM_feature\" # VERSION = \"1.0\" VERSION = \"1.1\"", "i = 0, 1, 2, 3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key", "/ vif_den_scale2) + (vif_num_scale3 / vif_den_scale3)) / 4.0 vif_scores_key =", "list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0])", "log_file_path, logger) @classmethod def _post_process_result(cls, result): # override Executor._post_process_result result", "feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1]) return feature_result", "ref_path, dis_path, w, h, log_file_path, logger) class MsSsimFeatureExtractor(FeatureExtractor): TYPE =", "\"SSIM_feature\" # VERSION = \"1.0\" VERSION = \"1.1\" # fix", "a list of assets, and run feature extraction on them,", "return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod def get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format(", "adding ADM noise floor outside cube root; add derived feature", "in dis_yuv_reader: dis_y = dis_yuv[0] firstm = dis_y.mean() secondm =", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) ) / 4.0 )", "= \"VifDiff_feature\" VERSION = '0.1' ATOM_FEATURES = ['vifdiff', 'vifdiff_num', 'vifdiff_den',", "ref_y = ref_yuv[0] firstm = ref_y.mean() secondm = ref_y.var() +", "(np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) ) / 4.0 ) # validate for", "coef # VERSION = '0.2.4' # Fix a bug in", "assert cur_idx == atom_feature_idx_dict[atom_feature] # parse value, allowing NaN and", "# (((adm_num_scale0 + ADM_SCALE_CONSTANT) / (adm_den_scale0 + ADM_SCALE_CONSTANT)) # +", "result.result_dict return 
result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\" VERSION =", "def _post_process_result(cls, result): # override Executor._post_process_result result = super(VmafFeatureExtractor, cls)._post_process_result(result)", "= cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key =", "cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) +", "'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES =", "assets, and run feature extraction on them, and return a", "cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1')", "cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1')", "override Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei = vifdiff_num_scalei", "np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0 ) # adm_scalei = adm_num_scalei /", "result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] = list(", "/ (adm_den_scale2 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale3 + ADM_SCALE_CONSTANT) /", "floor into per coef # 
VERSION = '0.2.4' # Fix", "log file, and return the scores in a dictionary format.", "return Result(asset, executor_id, result) @classmethod def get_scores_key(cls, atom_feature): return \"{type}_{atom_feature}_scores\".format(", "4.0 ) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature)", "= ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _, num_ref_features", "= \"MS_SSIM_feature\" # VERSION = \"1.0\" VERSION = \"1.1\" #", "import YuvReader class FeatureExtractor(Executor): \"\"\" FeatureExtractor takes in a list", "'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT =", "/ vifdiff_den_scalei, i = 0, 1, 2, 3 vifdiff_num_scale0_scores_key =", "* m[0] result.result_dict[refvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] =", "vif_den_scale0) + (vif_num_scale1 / vif_den_scale1) + # (vif_num_scale2 / vif_den_scale2)", "vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key", "atom_feature_idx_dict[atom_feature] # parse value, allowing NaN and inf val =", "asset), which call a command-line executable and generate feature scores", "override Executor._post_process_result result = super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2 = #", "0 def _generate_result(self, asset): # routine to call the command-line", "1, 2, 3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key", "to call the command-line executable and generate quality # scores", "__metaclass__ = ABCMeta @property @abstractmethod def 
ATOM_FEATURES(self): raise NotImplementedError def", "cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] =", "/ (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key]) +", "ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0 def _generate_result(self, asset): #", "and run feature extraction on them, and return a list", "len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score != 0 for atom_feature in self.ATOM_FEATURES[1:]: assert", "# Modify by moving motion2 to c code ATOM_FEATURES =", "for atom_feature in self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature]", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) ) # adm3 = \\", "allowing NaN and inf val = float(mo.group(2)) if np.isnan(val) or", "self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) class MsSsimFeatureExtractor(FeatureExtractor):", "Version 2.0\" import re import numpy as np import ast", "# ((vif_num_scale0 / vif_den_scale0) + (vif_num_scale1 / vif_den_scale1) + #", "for input YUV, updated VIF corner case # VERSION =", "ADM_SCALE_CONSTANT) / (adm_den_scale3 + ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key = cls.get_scores_key('adm3')", "ATOM_FEATURES = ['ref1st', 'ref2nd', 'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES = ['refvar',", "import Result from vmaf.tools.reader import YuvReader class FeatureExtractor(Executor): \"\"\" FeatureExtractor", "self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path 
dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_psnr(yuv_type,", "# Modify by adding ADM noise floor outside cube root;", "np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) #", "'ansnr', 'motion', 'motion2', 'vif_num', 'vif_den', 'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0',", "def get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self, asset):", "(np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key]))", "logger) class MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\" # VERSION = \"1.0\"", "= {} atom_feature_idx_dict = {} for atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature]", "= cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key =", "0 for ref_yuv in ref_yuv_reader: ref_y = ref_yuv[0] firstm =", "Override _get_feature_scores(self, asset), which read the feature scores from the", "for atom_feature in self.ATOM_FEATURES[1:]: assert len_score == len(atom_feature_scores_dict[atom_feature]), \\ \"Feature", "A FeatureExtractor must specify a unique type and version combination", "['ref1st', 'ref2nd', 'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES = ['refvar', 'disvar', ]", "= cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') 
result.result_dict[vifdiff_scale0_scores_key] =", "class SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\" # VERSION = \"1.0\" VERSION", "= cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key =", "0 with open(log_file_path, 'rt') as log_file: for line in log_file.readlines():", "'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ]", "= super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei = vifdiff_num_scalei / vifdiff_den_scalei, i", "atom_feature_scores_dict = {} atom_feature_idx_dict = {} for atom_feature in self.ATOM_FEATURES:", "again.\" feature_result = {} for atom_feature in self.ATOM_FEATURES: scores_key =", "/ np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) )", "ref_y.mean() secondm = ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i +=", "scores in a dictionary format. 
log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict =", "len_score != 0 for atom_feature in self.ATOM_FEATURES[1:]: assert len_score ==", "\\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key])))", "line) if mo: cur_idx = int(mo.group(1)) assert cur_idx == atom_feature_idx_dict[atom_feature]", "VERSION 2) Override _generate_result(self, asset), which call a command-line executable", "= \\ # (((adm_num_scale0 + ADM_SCALE_CONSTANT) / (adm_den_scale0 + ADM_SCALE_CONSTANT))", "2 # ref1st, ref2nd _, num_dis_features = dis_scores_mtx.shape assert num_dis_features", "# VERSION = '0.2.2b' # expose adm_den/num_scalex # VERSION =", "corner case # VERSION = '0.2.2b' # expose adm_den/num_scalex #", "+= 1 dis_scores_mtx = np.vstack(scores_mtx_list) assert ref_scores_mtx is not None", "adm_scalei = adm_num_scalei / adm_den_scalei, i = 0, 1, 2,", "DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2', 'adm2', 'adm3', 'adm_scale0',", "adm_num_scalei / adm_den_scalei, i = 0, 1, 2, 3 adm_num_scale0_scores_key", "= list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key])", "['psnr'] def _generate_result(self, asset): # routine to call the command-line", "class MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\" # VERSION = \"1.0\" #", "= \"PSNR_feature\" VERSION = \"1.0\" ATOM_FEATURES = ['psnr'] def _generate_result(self,", "possibly corrupt. 
Run cleanup script and try again.\" feature_result =", "type and version combination (by the TYPE and VERSION attribute),", "int(mo.group(1)) assert cur_idx == atom_feature_idx_dict[atom_feature] # parse value, allowing NaN", "vifdiff_den_scalei, i = 0, 1, 2, 3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0')", "self.executor_id return Result(asset, executor_id, result) @classmethod def get_scores_key(cls, atom_feature): return", "= cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) +", "/ np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0 ) # adm_scalei = adm_num_scalei", "# + ((adm_num_scale3 + ADM_SCALE_CONSTANT) / (adm_den_scale3 + ADM_SCALE_CONSTANT))) /", "dis2nd feature_result = {} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] =", "cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st')", "vif2 = # ((vif_num_scale0 / vif_den_scale0) + (vif_num_scale1 / vif_den_scale1)", "cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3')", "VERSION = \"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES =", "in log_file.readlines(): for atom_feature in self.ATOM_FEATURES: re_template = \"{af}: ([0-9]+)", "= self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) @classmethod", "which read the feature scores from the log file, and", "= dis_yuv[0] firstm = dis_y.mean() secondm = dis_y.var() + firstm**2", "vifdiff_den_scale1_scores_key = 
cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key", "cls.get_scores_key('adm2') adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list(", "= cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key]", "asset): # routine to call the command-line executable and generate", "np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2 = # ((vif_num_scale0 / vif_den_scale0) +", "'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def _generate_result(self, asset): #", "cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0')", "atom_feature in self.ATOM_FEATURES: re_template = \"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo =", "VERSION = '0.1' # vmaf_study; Anush's VIF fix # VERSION", "= cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key =", "cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT)", "get_var = lambda m: m[1] - m[0] * m[0] result.result_dict[refvar_scores_key]", "adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0') 
adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key", "example, follow VmafFeatureExtractor. \"\"\" __metaclass__ = ABCMeta @property @abstractmethod def", "'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def", "ADM_SCALE_CONSTANT)) # + ((adm_num_scale1 + ADM_SCALE_CONSTANT) / (adm_den_scale1 + ADM_SCALE_CONSTANT))", ") result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) result.result_dict[vifdiff_scale3_scores_key] =", "list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key]) /", "'0.2.4' # Fix a bug in adm feature passing scale", "TYPE and VERSION attribute), so that the Result generated by", "np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key]", "cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2')", "cls.ADM_SCALE_CONSTANT)) ) / 4.0 ) # validate for feature in", "assert num_dis_features == 2 # dis1st, dis2nd feature_result = {}", "# vif2 = # ((vif_num_scale0 / vif_den_scale0) + (vif_num_scale1 /", "((vif_num_scale0 / vif_den_scale0) + (vif_num_scale1 / vif_den_scale1) + # (vif_num_scale2", "with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, 
yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list = []", "= list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:,", "adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key", "vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key", "'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES = ['vifdiff_scale0',", "result = super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate refvar and disvar from", "log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'wt') as log_file: log_file.write(str(log_dict)) def", "cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3')", "yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list = [] i = 0 for", "= cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key =", "h, log_file_path, logger) @classmethod def _post_process_result(cls, result): # override Executor._post_process_result", "OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1',", "VERSION = \"1.1\" # python only ATOM_FEATURES 
= ['ref1st', 'ref2nd',", "ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list( (", "from abc import ABCMeta, abstractmethod import os from vmaf.tools.misc import", "super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2 = # (adm_num + ADM2_CONSTANT) /", "and generate feature # scores in the log file. quality_w,", "/ np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) )", "((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) ) / 4.0", "= 0 def _generate_result(self, asset): # routine to call the", "list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key]", "+ cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT))", ") result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) +", "dis_yuv in dis_yuv_reader: dis_y = dis_yuv[0] firstm = dis_y.mean() secondm", "cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key])", "def get_scores_key(cls, atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod def get_score_key(cls,", "= \"Apache, Version 2.0\" import re 
import numpy as np", "((adm_num_scale2 + ADM_SCALE_CONSTANT) / (adm_den_scale2 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale3", "+ cls.ADM_SCALE_CONSTANT)) ) / 4.0 ) # validate for feature", "class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\" VERSION = '0.1' ATOM_FEATURES =", ") result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] =", "in self.ATOM_FEATURES: re_template = \"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template,", "'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def _generate_result(self, asset): # routine to call", "/ np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert", "firstm = ref_y.mean() secondm = ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm])))", "'vif2', 'adm2', 'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT =", "numpy as np import ast from vmaf import ExternalProgramCaller, to_list", "result class PsnrFeatureExtractor(FeatureExtractor): TYPE = \"PSNR_feature\" VERSION = \"1.0\" ATOM_FEATURES", "a log file. 
3) Override _get_feature_scores(self, asset), which read the", "h, log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\" # VERSION", "\"Moment_feature\" # VERSION = \"1.0\" # call executable VERSION =", "\"1.1\" # python only ATOM_FEATURES = ['ref1st', 'ref2nd', 'dis1st', 'dis2nd',", "Executor._post_process_result result = super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate refvar and disvar", "VMAF convolution; update adm features by folding noise floor into", "result.result_dict[dis2nd_scores_key]))) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in", "assert cls.get_scores_key(feature) in result.result_dict return result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE =", "file, and return # the scores in a dictionary format.", "log file, and return # the scores in a dictionary", "adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key = cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key]", "ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) class MsSsimFeatureExtractor(FeatureExtractor): TYPE", "vif_den_scale1) + # (vif_num_scale2 / vif_den_scale2) + (vif_num_scale3 / vif_den_scale3))", "secondm = ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1", "vif_scalei = vif_num_scalei / vif_den_scalei, i = 0, 1, 2,", "= \"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ssim',", "yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list = [] i = 0 for", "= list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2 = #", "os from vmaf.tools.misc import make_absolute_path, run_process from vmaf.tools.stats import ListStats", "log_file.readlines(): for atom_feature in 
self.ATOM_FEATURES: re_template = \"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature)", "= 0, 1, 2, 3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key =", "@classmethod def get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self,", "Override _generate_result(self, asset), which call a command-line executable and generate", "the log file. quality_width, quality_height = asset.quality_width_height log_file_path = self._get_log_file_path(asset)", "outside cube root; add derived feature motion2 VERSION = '0.2.4c'", "adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key = cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key])", "ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx = None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h,", "line in log_file.readlines(): for atom_feature in self.ATOM_FEATURES: re_template = \"{af}:", "# adm3 = \\ # (((adm_num_scale0 + ADM_SCALE_CONSTANT) / (adm_den_scale0", "'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3',", "def _post_process_result(cls, result): # override Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result)", "= cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key =", "Executor from vmaf.core.result import Result from vmaf.tools.reader import YuvReader class", "\"Feature data possibly corrupt. 
Run cleanup script and try again.\"", "log_file: log_file.write(str(log_dict)) def _get_feature_scores(self, asset): # routine to read the", "# (vif_num_scale2 / vif_den_scale2) + (vif_num_scale3 / vif_den_scale3)) / 4.0", "ATOM_FEATURES = ['vif', 'adm', 'ansnr', 'motion', 'motion2', 'vif_num', 'vif_den', 'adm_num',", "/ adm_den_scalei, i = 0, 1, 2, 3 adm_num_scale0_scores_key =", "in a dictionary format. For an example, follow VmafFeatureExtractor. \"\"\"", "list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) +", "cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2')", "\"1.0\" ATOM_FEATURES = ['psnr'] def _generate_result(self, asset): # routine to", "w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w, h,", "+ cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "uniform reading with option of offset for input YUV, updated", "= \"1.0\" # call executable VERSION = \"1.1\" # python", "i += 1 dis_scores_mtx = np.vstack(scores_mtx_list) assert ref_scores_mtx is not", "file, and return the scores in a dictionary format. For", "a dictionary format. For an example, follow VmafFeatureExtractor. 
\"\"\" __metaclass__", "make_absolute_path, run_process from vmaf.tools.stats import ListStats __copyright__ = \"Copyright 2016-2018,", "re import numpy as np import ast from vmaf import", "'disvar', ] def _generate_result(self, asset): # routine to call the", "/ 4.0 ) # adm_scalei = adm_num_scalei / adm_den_scalei, i", "vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key])", "features by folding noise floor into per coef # VERSION", "# scores in the log file. quality_width, quality_height = asset.quality_width_height", "def _read_result(self, asset): result = {} result.update(self._get_feature_scores(asset)) executor_id = self.executor_id", "+= 1 ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx = None with YuvReader(filepath=asset.dis_workfile_path,", "__copyright__ = \"Copyright 2016-2018, Netflix, Inc.\" __license__ = \"Apache, Version", "dis_scores_mtx = np.vstack(scores_mtx_list) assert ref_scores_mtx is not None and dis_scores_mtx", "import VmafConfig, VmafExternalConfig from vmaf.core.executor import Executor from vmaf.core.result import", "num/den of each scale # VERSION = '0.2.2' # adm", "= '0.2' # expose vif_num, vif_den, adm_num, adm_den, anpsnr #", "a list of corresponding results. A FeatureExtractor must specify a", "Modify by moving motion2 to c code ATOM_FEATURES = ['vif',", "VERSION = '0.2.4b' # Modify by adding ADM noise floor", "return a list of corresponding results. 
A FeatureExtractor must specify", "as log_file: for line in log_file.readlines(): for atom_feature in self.ATOM_FEATURES:", "= dis_scores_mtx.shape assert num_dis_features == 2 # dis1st, dis2nd feature_result", "((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) +", "cls.ADM2_CONSTANT) ) # vif_scalei = vif_num_scalei / vif_den_scalei, i =", "@classmethod def _post_process_result(cls, result): # override Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor,", "'vif_scale2', 'vif_scale3', 'vif2', 'adm2', 'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3', ]", "dwt_quant_step # VERSION = '0.2.4b' # Modify by adding ADM", "'wt') as log_file: log_file.write(str(log_dict)) def _get_feature_scores(self, asset): # routine to", "result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)", "vif num/den of each scale # VERSION = '0.2.2' #", "= 0 for ref_yuv in ref_yuv_reader: ref_y = ref_yuv[0] firstm", "+ ADM2_CONSTANT) / (adm_den + ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2') adm_num_scores_key", "len_score == len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data possibly corrupt. Run cleanup", "# expose vif_num, vif_den, adm_num, adm_den, anpsnr # VERSION =", "adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key", "'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0", "return # the scores in a dictionary format. 
log_file_path =", "h=quality_height logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w, h, log_file_path,", "feature scores from the log file, and return # the", "ref2nd, dis1st, dis2nd refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key", "feature motion2 VERSION = '0.2.4c' # Modify by moving motion2", "m[0] result.result_dict[refvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\", "moving motion2 to c code ATOM_FEATURES = ['vif', 'adm', 'ansnr',", "\"PSNR_feature\" VERSION = \"1.0\" ATOM_FEATURES = ['psnr'] def _generate_result(self, asset):", "'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT", "= \"SSIM_feature\" # VERSION = \"1.0\" VERSION = \"1.1\" #", "return feature_result @classmethod def _post_process_result(cls, result): # override Executor._post_process_result result", "= self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE", "['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2', 'adm2', 'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2',", "import ListStats __copyright__ = \"Copyright 2016-2018, Netflix, Inc.\" __license__ =", "num_ref_features == 2 # ref1st, ref2nd _, num_dis_features = dis_scores_mtx.shape", "+ ADM_SCALE_CONSTANT) / (adm_den_scale3 + ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key =", "assert cls.get_scores_key(feature) in result.result_dict return result class PsnrFeatureExtractor(FeatureExtractor): TYPE =", "list(dis_scores_mtx[:, 1]) return feature_result @classmethod def _post_process_result(cls, result): # override", "run_process from vmaf.tools.stats import ListStats __copyright__ = \"Copyright 2016-2018, Netflix,", "= 
self.executor_id return Result(asset, executor_id, result) @classmethod def get_scores_key(cls, atom_feature):", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class", "= 0 ADM_SCALE_CONSTANT = 0 def _generate_result(self, asset): # routine", "(adm_den_scale2 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale3 + ADM_SCALE_CONSTANT) / (adm_den_scale3", "and inf val = float(mo.group(2)) if np.isnan(val) or np.isinf(val): val", "the scores in a dictionary format. For an example, follow", "height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list = [] i = 0", "list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2 = # ((vif_num_scale0", "adm_num, adm_den, anpsnr # VERSION = '0.2.1' # expose vif", "extraction on them, and return a list of corresponding results.", "ref_yuv[0] firstm = ref_y.mean() secondm = ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm],", ") # validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in", "# routine to read the feature scores from the log", "= cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key =", "vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key", "cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = 
cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3')", "for ref_yuv in ref_yuv_reader: ref_y = ref_yuv[0] firstm = ref_y.mean()", "logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path, logger)", "asset.quality_width_height log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger", "'ssim_l', 'ssim_c', 'ssim_s'] def _generate_result(self, asset): # routine to call", "= np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _, num_ref_features = ref_scores_mtx.shape assert", "dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w,", "vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key", "data possibly corrupt. 
Run cleanup script and try again.\" feature_result", "{} for atom_feature in self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature) feature_result[scores_key] =", "# adm2 = # (adm_num + ADM2_CONSTANT) / (adm_den +", "(np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) +", "= {} for atom_feature in self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature) feature_result[scores_key]", "adm abs-->fabs, corrected border handling, uniform reading with option of", "= cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key =", "= cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key =", "self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ssim(yuv_type,", "in a dictionary format. 
log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'rt')", "the feature scores from the log file, and return the", "= ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 ref_scores_mtx", "in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class SsimFeatureExtractor(FeatureExtractor):", "ADM_SCALE_CONSTANT) / (adm_den_scale2 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale3 + ADM_SCALE_CONSTANT)", "self._get_log_file_path(asset) atom_feature_scores_dict = {} atom_feature_idx_dict = {} for atom_feature in", "YUV, updated VIF corner case # VERSION = '0.2.2b' #", "/ vif_den_scale0) + (vif_num_scale1 / vif_den_scale1) + # (vif_num_scale2 /", "as log_file: log_file.write(str(log_dict)) def _get_feature_scores(self, asset): # routine to read", "= ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2',", "/ 4.0 ) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert", "vif_den_scale2) + (vif_num_scale3 / vif_den_scale3)) / 4.0 vif_scores_key = cls.get_scores_key('vif2')", "dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var = lambda m:", "1 continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score != 0 for", "result.update(self._get_feature_scores(asset)) executor_id = self.executor_id return Result(asset, executor_id, result) @classmethod def", "result class SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\" # VERSION = \"1.0\"", "= cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0') adm_scale1_scores_key =", "1, 2, 3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') 
adm_num_scale1_scores_key", "an example, follow VmafFeatureExtractor. \"\"\" __metaclass__ = ABCMeta @property @abstractmethod", "np.isinf(val): val = None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1 continue len_score", "executable VERSION = \"1.1\" # python only ATOM_FEATURES = ['ref1st',", "file. quality_width, quality_height = asset.quality_width_height log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path", "log file. quality_w, quality_h = asset.quality_width_height ref_scores_mtx = None with", "list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key]) /", "dis_scores_mtx.shape assert num_dis_features == 2 # dis1st, dis2nd feature_result =", "Result from vmaf.tools.reader import YuvReader class FeatureExtractor(Executor): \"\"\" FeatureExtractor takes", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)", "m[1] - m[0] * m[0] result.result_dict[refvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key],", "2.0\" import re import numpy as np import ast from", "] DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2', 'adm2', 'adm3',", "+ ADM_SCALE_CONSTANT)) # + ((adm_num_scale3 + ADM_SCALE_CONSTANT) / (adm_den_scale3 +", "file. 
3) Override _get_feature_scores(self, asset), which read the feature scores", "'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2',", "vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key", "if np.isnan(val) or np.isinf(val): val = None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] +=", ") result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] =", "generate feature # scores in the log file. quality_w, quality_h", "h=quality_height logger = self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path,", "'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3',", "each scale # VERSION = '0.2.2' # adm abs-->fabs, corrected", "import make_absolute_path, run_process from vmaf.tools.stats import ListStats __copyright__ = \"Copyright", "import numpy as np import ast from vmaf import ExternalProgramCaller,", "(np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] =", "for atom_feature in self.ATOM_FEATURES: re_template = \"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo", "= '0.2.2' # adm abs-->fabs, corrected border handling, uniform reading", "= \"1.0\" VERSION = \"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0", "class of FeatureExtractor must: 
1) Override TYPE and VERSION 2)", "dis_path, w, h, log_file_path, logger) @classmethod def _post_process_result(cls, result): #", "val = float(mo.group(2)) if np.isnan(val) or np.isinf(val): val = None", "'0.2.3' # AVX for VMAF convolution; update adm features by", "None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1 continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert", "is not None log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path", "atom_feature_idx_dict = {} for atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = []", "super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate refvar and disvar from ref1st, ref2nd,", "feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\"", "fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0',", "cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3')", "dis2nd refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd')", "cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar')", "= [] atom_feature_idx_dict[atom_feature] = 0 with open(log_file_path, 'rt') as log_file:", "= ['vifdiff', 'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2',", "'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 
'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def _generate_result(self,", "by moving motion2 to c code ATOM_FEATURES = ['vif', 'adm',", "+ (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) /", "= cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key =", "the feature scores from the log file, and return #", "in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature] = 0 with open(log_file_path,", "h=quality_height logger = self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w, h, log_file_path,", "log_file: log_str = log_file.read() log_dict = ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx'])", "'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0',", "= lambda m: m[1] - m[0] * m[0] result.result_dict[refvar_scores_key] =", "= asset.quality_width_height ref_scores_mtx = None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset))", "# VERSION = \"1.0\" # call executable VERSION = \"1.1\"", "'vif_den', 'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2',", "result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2 =", "call executable VERSION = \"1.1\" # python only ATOM_FEATURES =", "from vmaf import ExternalProgramCaller, to_list 
from vmaf.config import VmafConfig, VmafExternalConfig", "@property @abstractmethod def ATOM_FEATURES(self): raise NotImplementedError def _read_result(self, asset): result", "scores_key = self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return feature_result class VmafFeatureExtractor(FeatureExtractor):", "ADM noise floor outside cube root; add derived feature motion2", "result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)", "vmaf.tools.reader import YuvReader class FeatureExtractor(Executor): \"\"\" FeatureExtractor takes in a", "override Executor._post_process_result result = super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate refvar and", "vif_num, vif_den, adm_num, adm_den, anpsnr # VERSION = '0.2.1' #", "to_list from vmaf.config import VmafConfig, VmafExternalConfig from vmaf.core.executor import Executor", "/ (adm_den_scale3 + ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key]", "routine to call the command-line executable and generate feature #", "which call a command-line executable and generate feature scores in", "atom_feature=atom_feature) def _get_feature_scores(self, asset): # routine to read the feature", "= \"Copyright 2016-2018, Netflix, Inc.\" __license__ = \"Apache, Version 2.0\"", "dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w,", "executor_id = self.executor_id return Result(asset, executor_id, result) @classmethod def get_scores_key(cls,", "num_dis_features = dis_scores_mtx.shape assert num_dis_features == 2 # dis1st, dis2nd", "cls.get_scores_key(feature) in result.result_dict return result class PsnrFeatureExtractor(FeatureExtractor): TYPE = 
\"PSNR_feature\"", "and return # the scores in a dictionary format. log_file_path", "feature extraction on them, and return a list of corresponding", "ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor): TYPE", "the TYPE and VERSION attribute), so that the Result generated", "return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self, asset): # routine to", "\"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ssim', 'ssim_l',", "'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ] DERIVED_ATOM_FEATURES", "'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ] DERIVED_ATOM_FEATURES = ['vif_scale0',", "SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\" # VERSION = \"1.0\" VERSION =", "['refvar', 'disvar', ] def _generate_result(self, asset): # routine to call", "unique type and version combination (by the TYPE and VERSION", "([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template, line) if mo: cur_idx = int(mo.group(1))", "= cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key = cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] =", "must: 1) Override TYPE and VERSION 2) Override _generate_result(self, asset),", "list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) /", "= \"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ms_ssim',", "from vmaf.config import VmafConfig, VmafExternalConfig from vmaf.core.executor import Executor from", "h, log_file_path, logger) class 
MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\" # VERSION", "# calculate refvar and disvar from ref1st, ref2nd, dis1st, dis2nd", "VERSION = '0.2.4c' # Modify by moving motion2 to c", "2, 3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key =", "logger) class MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\" # VERSION = \"1.0\"", "np.vstack(scores_mtx_list) dis_scores_mtx = None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as", "vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) )", "abc import ABCMeta, abstractmethod import os from vmaf.tools.misc import make_absolute_path,", "list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) /", "# VERSION = '0.2.2' # adm abs-->fabs, corrected border handling,", "self.ATOM_FEATURES: re_template = \"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template, line)", ") result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate", "= cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key =", "result = {} result.update(self._get_feature_scores(asset)) executor_id = self.executor_id return Result(asset, executor_id,", "'0.1' 
ATOM_FEATURES = ['vifdiff', 'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1',", "vmaf.tools.misc import make_absolute_path, run_process from vmaf.tools.stats import ListStats __copyright__ =", "border handling, uniform reading with option of offset for input", "ref_scores_mtx.shape assert num_ref_features == 2 # ref1st, ref2nd _, num_dis_features", "'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1',", "= cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key =", "adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) /", "np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) +", "'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2',", "dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w,", "ref1st, ref2nd _, num_dis_features = dis_scores_mtx.shape assert num_dis_features == 2", "i = 0, 1, 2, 3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key", "None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list =", "= 
ref_y.mean() secondm = ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i", "scores in a log file. 3) Override _get_feature_scores(self, asset), which", "fix # VERSION = '0.2' # expose vif_num, vif_den, adm_num,", "feature passing scale into dwt_quant_step # VERSION = '0.2.4b' #", "to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) #", "VERSION = \"1.0\" # call executable VERSION = \"1.1\" #", "= list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key])", "== atom_feature_idx_dict[atom_feature] # parse value, allowing NaN and inf val", "= 0 ATOM_FEATURES = ['ssim', 'ssim_l', 'ssim_c', 'ssim_s'] def _generate_result(self,", "the scores in a dictionary format. 
log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict", "cls)._post_process_result(result) # calculate refvar and disvar from ref1st, ref2nd, dis1st,", "cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2')", "w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type, ref_path, dis_path, w, h,", "'rt') as log_file: log_str = log_file.read() log_dict = ast.literal_eval(log_str) ref_scores_mtx", "+ ADM_SCALE_CONSTANT) / (adm_den_scale1 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale2 +", "in result.result_dict return result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\" VERSION", "w, h, log_file_path, logger) @classmethod def _post_process_result(cls, result): # override", "list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) ) #", "FeatureExtractor must: 1) Override TYPE and VERSION 2) Override _generate_result(self,", "# expose vif num/den of each scale # VERSION =", "refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key", "ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _, num_ref_features = ref_scores_mtx.shape", "of assets, and run feature extraction on them, and return", "= {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset) with open(log_file_path,", "None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list =", "0, 1, 
2, 3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0')", "secondm = dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1", "cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key = cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2')", "dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type, ref_path, dis_path, w,", "'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ] DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1',", "into dwt_quant_step # VERSION = '0.2.4b' # Modify by adding", "scores_mtx_list = [] i = 0 for dis_yuv in dis_yuv_reader:", "+ cls.ADM_SCALE_CONSTANT) ) # adm3 = \\ # (((adm_num_scale0 +", "/ np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) )", "+ (vif_num_scale1 / vif_den_scale1) + # (vif_num_scale2 / vif_den_scale2) +", "type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self, asset): # routine to read the", "assert cls.get_scores_key(feature) in result.result_dict return result class SsimFeatureExtractor(FeatureExtractor): TYPE =", "cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2')", "'ref2nd', 'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES = ['refvar', 'disvar', ] def", "= list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1]) return feature_result @classmethod", "['vifdiff', 
'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3',", "vifdiff_num_scalei / vifdiff_den_scalei, i = 0, 1, 2, 3 vifdiff_num_scale0_scores_key", "not None and dis_scores_mtx is not None log_dict = {'ref_scores_mtx':", "as log_file: log_str = log_file.read() log_dict = ast.literal_eval(log_str) ref_scores_mtx =", ") # vif_scalei = vif_num_scalei / vif_den_scalei, i = 0,", "(np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) ) # adm3", "result): # override Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei", "result.result_dict[adm3_scores_key] = list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) +", "logger = self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path, logger)", "assert ref_scores_mtx is not None and dis_scores_mtx is not None", "feature scores from the log file, and return the scores", "yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type, ref_path,", "update adm features by folding noise floor into per coef", "_get_feature_scores(self, asset): # routine to read the feature scores from", "cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key])", "feature # scores in the log file. 
quality_w, quality_h =", "'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ]", "['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT =", "/ (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) ) # adm3 = \\ #", "cls.get_scores_key('adm_scale0') adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key = cls.get_scores_key('adm_scale3')", "cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) +", "the log file, and return # the scores in a", "width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list = [] i =", "OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ssim', 'ssim_l', 'ssim_c', 'ssim_s'] def", "= cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] =", "+ # (vif_num_scale2 / vif_den_scale2) + (vif_num_scale3 / vif_den_scale3)) /", "'vif_scale3', 'vif2', 'adm2', 'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3', ] ADM2_CONSTANT", "(np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key]))", "quality # scores in the log file. 
quality_width, quality_height =", "with open(log_file_path, 'rt') as log_file: log_str = log_file.read() log_dict =", "(adm_den_scale0 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale1 + ADM_SCALE_CONSTANT) / (adm_den_scale1", "3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1')", "np.isnan(val) or np.isinf(val): val = None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1", "0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')]", "3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1')", "/ vif_den_scale1) + # (vif_num_scale2 / vif_den_scale2) + (vif_num_scale3 /", "TYPE = \"VifDiff_feature\" VERSION = '0.1' ATOM_FEATURES = ['vifdiff', 'vifdiff_num',", "((adm_num_scale1 + ADM_SCALE_CONSTANT) / (adm_den_scale1 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale2", "VERSION = '0.2.3' # AVX for VMAF convolution; update adm", "yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path,", "list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate for feature in", "\\ \"Feature data possibly corrupt. 
Run cleanup script and try", "updated VIF corner case # VERSION = '0.2.2b' # expose", "get_scores_key(cls, atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod def get_score_key(cls, atom_feature):", "vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key", "cls.get_scores_key('vifdiff_num_scale2') vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3')", "dis_scores_mtx is not None log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()}", "not None log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path =", "DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT = 0", "# Fix a bug in adm feature passing scale into", "adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0') adm_scale1_scores_key", "(np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) +", "# + ((adm_num_scale2 + ADM_SCALE_CONSTANT) / (adm_den_scale2 + ADM_SCALE_CONSTANT)) #", "open(log_file_path, 'rt') as log_file: log_str = log_file.read() log_dict = ast.literal_eval(log_str)", ") # adm_scalei = adm_num_scalei / adm_den_scalei, i = 0,", "(np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) ) # adm3 = \\ # (((adm_num_scale0", 
"(adm_den + ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2') adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key", "(np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] =", "1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1]) return", "Executor._post_process_result result = super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2 = # (adm_num", "cls.get_scores_key('adm_scale2') adm_scale3_scores_key = cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)", "atom_feature in self.ATOM_FEATURES[1:]: assert len_score == len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data", "'0.2.4b' # Modify by adding ADM noise floor outside cube", "= None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1 continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]])", "atom_feature_idx_dict[atom_feature] = 0 with open(log_file_path, 'rt') as log_file: for line", "# VERSION = '0.2.1' # expose vif num/den of each", "= '0.1' # vmaf_study; Anush's VIF fix # VERSION =", "vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key", "ref2nd _, num_dis_features = dis_scores_mtx.shape assert num_dis_features == 2 #", "0 ADM_SCALE_CONSTANT = 0 def _generate_result(self, asset): # routine to", "atom_feature_scores_dict[atom_feature] return feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\" # VERSION", "Netflix, Inc.\" __license__ = 
\"Apache, Version 2.0\" import re import", "# parse value, allowing NaN and inf val = float(mo.group(2))", "m[0] * m[0] result.result_dict[refvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key]", "cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3')", "= super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate refvar and disvar from ref1st,", "routine to read the feature scores from the log file,", "= cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key =", "vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key", "w, h, log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\" #", "'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4',", "self.ATOM_FEATURES[1:]: assert len_score == len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data possibly corrupt.", "scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 dis_scores_mtx = np.vstack(scores_mtx_list) assert ref_scores_mtx", "'ms_ssim_s_scale4', ] def _generate_result(self, asset): # routine to call the", "= cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key]) /", "takes in a list of assets, 
and run feature extraction", "in self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return feature_result", "vif_den_scalei, i = 0, 1, 2, 3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0')", "0, 1, 2, 3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0')", "def _generate_result(self, asset): # routine to call the command-line executable", ") result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2", "'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3',", "adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key])", "= list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key])", "'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3',", "(np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) /", "TYPE and VERSION 2) Override _generate_result(self, asset), which call a", "A derived class of FeatureExtractor must: 1) Override TYPE and", "in ref_yuv_reader: ref_y = ref_yuv[0] firstm = 
ref_y.mean() secondm =", "cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key])", "= '0.2.1' # expose vif num/den of each scale #", "= cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key])", "= None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list", "{'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'wt')", "4.0 adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) +", "(np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) +", "command-line executable and generate feature # scores in the log", "atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature] = 0 with", "'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1',", "/ (adm_den_scale0 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale1 + ADM_SCALE_CONSTANT) /", "ADM_SCALE_CONSTANT) / (adm_den_scale0 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale1 + ADM_SCALE_CONSTANT)", "= list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) 
result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key])", "_generate_result(self, asset): # routine to call the command-line executable and", "PsnrFeatureExtractor(FeatureExtractor): TYPE = \"PSNR_feature\" VERSION = \"1.0\" ATOM_FEATURES = ['psnr']", "= self._get_log_file_path(asset) with open(log_file_path, 'rt') as log_file: log_str = log_file.read()", "Modify by adding ADM noise floor outside cube root; add", "return the scores in a dictionary format. For an example,", "adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key", "(np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2 = # ((vif_num_scale0 /", "'motion2', 'vif_num', 'vif_den', 'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1',", "# override Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei =", "(np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0", "_read_result(self, asset): result = {} result.update(self._get_feature_scores(asset)) executor_id = self.executor_id return", "dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w,", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "= cls.get_scores_key('adm_scale2') adm_scale3_scores_key = 
cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key]) +", "in result.result_dict return result class SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\" #", "adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key", "= '0.2.3' # AVX for VMAF convolution; update adm features", "Result(asset, executor_id, result) @classmethod def get_scores_key(cls, atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE,", "vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key", "/ np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key]))", "= ['ssim', 'ssim_l', 'ssim_c', 'ssim_s'] def _generate_result(self, asset): # routine", "by it can be identified. 
A derived class of FeatureExtractor", "self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) @classmethod def", "cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key]))", "scores_mtx_list = [] i = 0 for ref_yuv in ref_yuv_reader:", "= cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key]) +", "in result.result_dict return result class PsnrFeatureExtractor(FeatureExtractor): TYPE = \"PSNR_feature\" VERSION", "with open(log_file_path, 'rt') as log_file: for line in log_file.readlines(): for", "1, 2, 3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key", "_generate_result(self, asset), which call a command-line executable and generate feature", "(np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) /", "return result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\" VERSION = '0.1'", "ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path,", "the Result generated by it can be identified. 
A derived", "((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) +", "cur_idx == atom_feature_idx_dict[atom_feature] # parse value, allowing NaN and inf", "'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT = 0 def", "# expose adm_den/num_scalex # VERSION = '0.2.3' # AVX for", "+ cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "# routine to call the command-line executable and generate quality", "dis_path, w, h, log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\"", "cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key])", "dis_y.mean() secondm = dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i +=", "and return the scores in a dictionary format. 
For an", "(adm_den_scale1 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale2 + ADM_SCALE_CONSTANT) / (adm_den_scale2", "= adm_num_scalei / adm_den_scalei, i = 0, 1, 2, 3", "logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger)", "/ vif_den_scalei, i = 0, 1, 2, 3 vif_num_scale0_scores_key =", "# vif_scalei = vif_num_scalei / vif_den_scalei, i = 0, 1,", "\"1.0\" VERSION = \"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES", "'vif_num', 'vif_den', 'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2',", "NotImplementedError def _read_result(self, asset): result = {} result.update(self._get_feature_scores(asset)) executor_id =", "# fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0',", "= cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key]", "VmafExternalConfig from vmaf.core.executor import Executor from vmaf.core.result import Result from", "'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2', 'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ]", "feature # scores in the log file. 
quality_width, quality_height =", "corrected border handling, uniform reading with option of offset for", "_post_process_result(cls, result): # override Executor._post_process_result result = super(VmafFeatureExtractor, cls)._post_process_result(result) #", "= self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) class", "+ cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) ) # vif_scalei =", "ADM_SCALE_CONSTANT) / (adm_den_scale1 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale2 + ADM_SCALE_CONSTANT)", "dis1st, dis2nd refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key =", "+ ADM_SCALE_CONSTANT) / (adm_den_scale2 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale3 +", "bug in adm feature passing scale into dwt_quant_step # VERSION", "(np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate for feature in cls.DERIVED_ATOM_FEATURES:", "/ np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) )", "'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES = ['refvar', 'disvar', ] def _generate_result(self,", "ABCMeta, abstractmethod import os from vmaf.tools.misc import make_absolute_path, run_process from", "] def _generate_result(self, asset): # routine to call the command-line", "( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key])", "= cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key =", "_get_feature_scores(self, asset), 
which read the feature scores from the log", "and generate feature # scores in the log file. quality_width,", "that the Result generated by it can be identified. A", "fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ssim', 'ssim_l', 'ssim_c', 'ssim_s']", "Override TYPE and VERSION 2) Override _generate_result(self, asset), which call", "TYPE = \"SSIM_feature\" # VERSION = \"1.0\" VERSION = \"1.1\"", "feature_result = {} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:,", "= atom_feature_scores_dict[atom_feature] return feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\" #", "'0.1' # vmaf_study; Anush's VIF fix # VERSION = '0.2'", "dis_scores_mtx = None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader:", "'adm', 'ansnr', 'motion', 'motion2', 'vif_num', 'vif_den', 'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0',", "= list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) / np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key])", "list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) +", "+ firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 ref_scores_mtx = np.vstack(scores_mtx_list)", "\"Copyright 2016-2018, Netflix, Inc.\" __license__ = \"Apache, Version 2.0\" import", "# override Executor._post_process_result result = super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate refvar", "cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = 
cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1')", "vmaf.core.executor import Executor from vmaf.core.result import Result from vmaf.tools.reader import", "= cls.get_scores_key('adm_scale0') adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key =", "adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key", "scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx =", "motion2 to c code ATOM_FEATURES = ['vif', 'adm', 'ansnr', 'motion',", "_post_process_result(cls, result): # override Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) #", "cleanup script and try again.\" feature_result = {} for atom_feature", "= cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "0, 1, 2, 3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0')", "'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0',", "from vmaf.core.executor import Executor from vmaf.core.result import Result from vmaf.tools.reader", "= self._get_log_file_path(asset) with open(log_file_path, 'wt') as log_file: log_file.write(str(log_dict)) def _get_feature_scores(self,", "# ref1st, ref2nd _, num_dis_features = dis_scores_mtx.shape assert num_dis_features ==", "[secondm]))) i += 1 ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx = None", "/ 
(adm_den_scale1 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale2 + ADM_SCALE_CONSTANT) /", "= ['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2', 'adm2', 'adm3', 'adm_scale0', 'adm_scale1',", "ref_path, dis_path, w, h, log_file_path, logger) @classmethod def _post_process_result(cls, result):", "or np.isinf(val): val = None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature] += 1 continue", "= super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2 = # (adm_num + ADM2_CONSTANT)", "'0.2.1' # expose vif num/den of each scale # VERSION", "result): # override Executor._post_process_result result = super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2", "= cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale0_scores_key]) /", "adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)", "= cls.get_scores_key('adm2') adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] =", "ATOM_FEATURES = ['vifdiff', 'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1', 'vifdiff_den_scale1', 'vifdiff_num_scale2',", "np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) result.result_dict[vifdiff_scale3_scores_key]", "(by the TYPE and VERSION attribute), so that the Result", "in a log file. 
3) Override _get_feature_scores(self, asset), which read", "expose vif_num, vif_den, adm_num, adm_den, anpsnr # VERSION = '0.2.1'", "1 ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx = None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w,", "ADM_SCALE_CONSTANT = 0 def _generate_result(self, asset): # routine to call", "3) Override _get_feature_scores(self, asset), which read the feature scores from", "re.match(re_template, line) if mo: cur_idx = int(mo.group(1)) assert cur_idx ==", "np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) )", "= ['vif', 'adm', 'ansnr', 'motion', 'motion2', 'vif_num', 'vif_den', 'adm_num', 'adm_den',", "quality_width, quality_height = asset.quality_width_height log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path", "zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature)", "feature_result = {} for atom_feature in self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature)", "self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type,", "+ cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT))", "version combination (by the TYPE and VERSION attribute), so that", "ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2') 
adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den')", "+ cls.ADM2_CONSTANT) ) # vif_scalei = vif_num_scalei / vif_den_scalei, i", "for line in log_file.readlines(): for atom_feature in self.ATOM_FEATURES: re_template =", "cls)._post_process_result(result) # adm2 = # (adm_num + ADM2_CONSTANT) / (adm_den", "[] i = 0 for ref_yuv in ref_yuv_reader: ref_y =", "# vifdiff_scalei = vifdiff_num_scalei / vifdiff_den_scalei, i = 0, 1,", "noise floor outside cube root; add derived feature motion2 VERSION", "result.result_dict return result class PsnrFeatureExtractor(FeatureExtractor): TYPE = \"PSNR_feature\" VERSION =", "+ ((adm_num_scale2 + ADM_SCALE_CONSTANT) / (adm_den_scale2 + ADM_SCALE_CONSTANT)) # +", "+ ((adm_num_scale3 + ADM_SCALE_CONSTANT) / (adm_den_scale3 + ADM_SCALE_CONSTANT))) / 4.0", "cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0')", "# fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ssim', 'ssim_l', 'ssim_c',", "_post_process_result(cls, result): # override Executor._post_process_result result = super(MomentFeatureExtractor, cls)._post_process_result(result) #", "dis_y = dis_yuv[0] firstm = dis_y.mean() secondm = dis_y.var() +", "cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2')", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key])", "'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'wt') as log_file:", 
"inf val = float(mo.group(2)) if np.isnan(val) or np.isinf(val): val =", "(np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key]))", "+ firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 dis_scores_mtx = np.vstack(scores_mtx_list)", "\"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template, line) if mo: cur_idx", "'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ]", "cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') adm_num_scale3_scores_key = cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3')", "= list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) )", "cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key])", "feature scores in a log file. 
3) Override _get_feature_scores(self, asset),", "result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)", "= ['ref1st', 'ref2nd', 'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES = ['refvar', 'disvar',", "lambda m: m[1] - m[0] * m[0] result.result_dict[refvar_scores_key] = \\", "option of offset for input YUV, updated VIF corner case", "anpsnr # VERSION = '0.2.1' # expose vif num/den of", "in self.ATOM_FEATURES[1:]: assert len_score == len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data possibly", "VERSION = '0.2' # expose vif_num, vif_den, adm_num, adm_den, anpsnr", "import ExternalProgramCaller, to_list from vmaf.config import VmafConfig, VmafExternalConfig from vmaf.core.executor", "= np.vstack(scores_mtx_list) dis_scores_mtx = None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset))", "list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] = list( (np.array(result.result_dict[vif_num_scale3_scores_key]) /", "and return a list of corresponding results. 
A FeatureExtractor must", "= ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT = 0 ADM_SCALE_CONSTANT", "/ 4.0 adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key])", "ATOM_FEATURES(self): raise NotImplementedError def _read_result(self, asset): result = {} result.update(self._get_feature_scores(asset))", "ref_scores_mtx = None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader:", "(np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] =", "2) Override _generate_result(self, asset), which call a command-line executable and", "# adm_scalei = adm_num_scalei / adm_den_scalei, i = 0, 1,", "= self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger", "def _get_feature_scores(self, asset): # routine to read the feature scores", "YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list = [] i", "logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger)", "cube root; add derived feature motion2 VERSION = '0.2.4c' #", "= cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key =", "expose adm_den/num_scalex # VERSION = '0.2.3' # AVX for VMAF", "dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var = lambda m: m[1] - m[0]", "corrupt. 
Run cleanup script and try again.\" feature_result = {}", "vmaf.tools.stats import ListStats __copyright__ = \"Copyright 2016-2018, Netflix, Inc.\" __license__", "(vif_num_scale3 / vif_den_scale3)) / 4.0 vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key] =", "/ np.array(result.result_dict[vif_den_scale3_scores_key])) ) # vif2 = # ((vif_num_scale0 / vif_den_scale0)", ") / 4.0 ) # adm_scalei = adm_num_scalei / adm_den_scalei,", "cls.get_scores_key('dis2nd') get_var = lambda m: m[1] - m[0] * m[0]", ") # adm3 = \\ # (((adm_num_scale0 + ADM_SCALE_CONSTANT) /", "log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict = {} atom_feature_idx_dict = {} for", "format. log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict = {} atom_feature_idx_dict = {}", "self._get_log_file_path(asset) with open(log_file_path, 'wt') as log_file: log_file.write(str(log_dict)) def _get_feature_scores(self, asset):", "([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template, line) if mo: cur_idx =", "= cls.get_scores_key('dis2nd') get_var = lambda m: m[1] - m[0] *", "@abstractmethod def ATOM_FEATURES(self): raise NotImplementedError def _read_result(self, asset): result =", "/ (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key])", "0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1]) return feature_result @classmethod def _post_process_result(cls,", "raise NotImplementedError def _read_result(self, asset): result = {} result.update(self._get_feature_scores(asset)) executor_id", "vifdiff_scalei = vifdiff_num_scalei / vifdiff_den_scalei, i = 0, 1, 2,", "h=quality_height logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w, h, log_file_path,", "TYPE = \"PSNR_feature\" VERSION 
= \"1.0\" ATOM_FEATURES = ['psnr'] def", "# VERSION = \"1.0\" VERSION = \"1.1\" # fix OPT_RANGE_PIXEL_OFFSET", "# VERSION = '0.1' # vmaf_study; Anush's VIF fix #", "= list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) )", "generate quality # scores in the log file. quality_width, quality_height", "result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate for", "result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key] = list(", "+ ADM_SCALE_CONSTANT) / (adm_den_scale0 + ADM_SCALE_CONSTANT)) # + ((adm_num_scale1 +", "1 dis_scores_mtx = np.vstack(scores_mtx_list) assert ref_scores_mtx is not None and", "noise floor into per coef # VERSION = '0.2.4' #", "# VERSION = '0.2' # expose vif_num, vif_den, adm_num, adm_den,", "# call executable VERSION = \"1.1\" # python only ATOM_FEATURES", "= '0.1' ATOM_FEATURES = ['vifdiff', 'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0', 'vifdiff_num_scale1',", "'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2', 'adm2', 'adm3', 'adm_scale0', 'adm_scale1', 'adm_scale2', 'adm_scale3',", "passing scale into dwt_quant_step # VERSION = '0.2.4b' # Modify", "cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key = cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2')", "(np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / 
np.array(result.result_dict[vif_den_scale2_scores_key]))", "generated by it can be identified. A derived class of", "+ ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2') adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key =", "cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1')", "dis_yuv_reader: dis_y = dis_yuv[0] firstm = dis_y.mean() secondm = dis_y.var()", "= cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key =", "cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0')", "+ cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT))", "by folding noise floor into per coef # VERSION =", "'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3',", "ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path, dis_path,", "VERSION = '0.1' ATOM_FEATURES = ['vifdiff', 'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0', 'vifdiff_den_scale0',", "'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def _generate_result(self, asset): # routine", "= cls.get_scores_key('adm_den_scale1') adm_num_scale2_scores_key = cls.get_scores_key('adm_num_scale2') adm_den_scale2_scores_key = cls.get_scores_key('adm_den_scale2') 
adm_num_scale3_scores_key =", "firstm = dis_y.mean() secondm = dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm])))", "firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 dis_scores_mtx = np.vstack(scores_mtx_list) assert", "as np import ast from vmaf import ExternalProgramCaller, to_list from", "VERSION = '0.2.4' # Fix a bug in adm feature", "to c code ATOM_FEATURES = ['vif', 'adm', 'ansnr', 'motion', 'motion2',", "vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key]))", "[] atom_feature_idx_dict[atom_feature] = 0 with open(log_file_path, 'rt') as log_file: for", "asset.quality_width_height ref_scores_mtx = None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as", "== 2 # ref1st, ref2nd _, num_dis_features = dis_scores_mtx.shape assert", "disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var", "only ATOM_FEATURES = ['ref1st', 'ref2nd', 'dis1st', 'dis2nd', ] DERIVED_ATOM_FEATURES =", "from ref1st, ref2nd, dis1st, dis2nd refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key =", "result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] = list(", "ATOM_FEATURES = ['ssim', 'ssim_l', 'ssim_c', 'ssim_s'] def _generate_result(self, asset): #", "log_dict = ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _,", "_, num_ref_features = ref_scores_mtx.shape assert num_ref_features == 2 # ref1st,", "in the log file. 
quality_w, quality_h = asset.quality_width_height ref_scores_mtx =", "with YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list = []", "follow VmafFeatureExtractor. \"\"\" __metaclass__ = ABCMeta @property @abstractmethod def ATOM_FEATURES(self):", "result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)", "i = 0 for dis_yuv in dis_yuv_reader: dis_y = dis_yuv[0]", "= \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key],", "= asset.quality_width_height log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height", "'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3',", "atom_feature=atom_feature) @classmethod def get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def", "= {} result.update(self._get_feature_scores(asset)) executor_id = self.executor_id return Result(asset, executor_id, result)", "= float(mo.group(2)) if np.isnan(val) or np.isinf(val): val = None atom_feature_scores_dict[atom_feature].append(val)", "into per coef # VERSION = '0.2.4' # Fix a", "2016-2018, Netflix, Inc.\" __license__ = \"Apache, Version 2.0\" import re", "and VERSION attribute), so that the Result generated by it", "{} for atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature] =", "= 
cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key =", "list of assets, and run feature extraction on them, and", "len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score != 0 for atom_feature in", "in the log file. quality_width, quality_height = asset.quality_width_height log_file_path =", ") result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) +", "/ (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) ) # vif_scalei = vif_num_scalei /", "{} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')]", "= 0, 1, 2, 3 adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key =", "# dis1st, dis2nd feature_result = {} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0])", "yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ssim(yuv_type, ref_path,", "= \"VMAF_feature\" # VERSION = '0.1' # vmaf_study; Anush's VIF", "num_dis_features == 2 # dis1st, dis2nd feature_result = {} feature_result[self.get_scores_key('ref1st')]", "it can be identified. 
A derived class of FeatureExtractor must:", "input YUV, updated VIF corner case # VERSION = '0.2.2b'", "/ (adm_den + ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2') adm_num_scores_key = cls.get_scores_key('adm_num')", "= ref_yuv[0] firstm = ref_y.mean() secondm = ref_y.var() + firstm**2", "scores from the log file, and return the scores in", "# AVX for VMAF convolution; update adm features by folding", "vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key]", "adm features by folding noise floor into per coef #", "= \"Moment_feature\" # VERSION = \"1.0\" # call executable VERSION", "log_str = log_file.read() log_dict = ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx", "cls.get_scores_key('adm_num_scale3') adm_den_scale3_scores_key = cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0') adm_scale1_scores_key = cls.get_scores_key('adm_scale1')", "width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list = [] i =", "VERSION = \"1.0\" ATOM_FEATURES = ['psnr'] def _generate_result(self, asset): #", "result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT)", "'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3',", "vif_num_scalei / vif_den_scalei, i = 0, 1, 2, 3 vif_num_scale0_scores_key", "'motion', 'motion2', 'vif_num', 'vif_den', 'adm_num', 'adm_den', 'anpsnr', 'vif_num_scale0', 'vif_den_scale0', 'vif_num_scale1',", "vif_den, adm_num, adm_den, anpsnr # VERSION 
= '0.2.1' # expose", "class VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\" # VERSION = '0.1' #", "= [] i = 0 for dis_yuv in dis_yuv_reader: dis_y", "FeatureExtractor must specify a unique type and version combination (by", "list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) ) #", "add derived feature motion2 VERSION = '0.2.4c' # Modify by", "routine to call the command-line executable and generate quality #", "a command-line executable and generate feature scores in a log", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)", "validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return", "result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\" VERSION = '0.1' ATOM_FEATURES", "Fix a bug in adm feature passing scale into dwt_quant_step", "'vif_den_scale0', 'vif_num_scale1', 'vif_den_scale1', 'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1',", "MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\" # VERSION = \"1.0\" # call", "logger = self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w, h, log_file_path, logger)", "value, allowing NaN and inf val = float(mo.group(2)) if np.isnan(val)", "'rt') as log_file: for line in log_file.readlines(): for atom_feature in", "in adm feature passing scale into dwt_quant_step # VERSION =", "np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key]", "# VERSION = 
'0.2.3' # AVX for VMAF convolution; update", "( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key])", "/ (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key])", "Run cleanup script and try again.\" feature_result = {} for", "= # (adm_num + ADM2_CONSTANT) / (adm_den + ADM2_CONSTANT) adm2_scores_key", "class MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\" # VERSION = \"1.0\" VERSION", "import ABCMeta, abstractmethod import os from vmaf.tools.misc import make_absolute_path, run_process", "Executor._post_process_result result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei = vifdiff_num_scalei /", "ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) @classmethod def _post_process_result(cls,", "+= 1 continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score != 0", "= cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key = cls.get_scores_key('vifdiff_den_scale1') vifdiff_num_scale2_scores_key =", "/ np.array(result.result_dict[vifdiff_den_scale0_scores_key])) ) result.result_dict[vifdiff_scale1_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) )", "dis_yuv[0] firstm = dis_y.mean() secondm = dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm],", "the command-line executable and generate quality # scores in the", 
"cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1')", "2, 3 vif_num_scale0_scores_key = cls.get_scores_key('vif_num_scale0') vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key =", "(np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0 ) # adm_scalei =", "height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list = [] i = 0", "result) @classmethod def get_scores_key(cls, atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod", "vifdiff_den_scale2_scores_key = cls.get_scores_key('vifdiff_den_scale2') vifdiff_num_scale3_scores_key = cls.get_scores_key('vifdiff_num_scale3') vifdiff_den_scale3_scores_key = cls.get_scores_key('vifdiff_den_scale3') vifdiff_scale0_scores_key", "ref_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 ref_scores_mtx =", "import ast from vmaf import ExternalProgramCaller, to_list from vmaf.config import", "w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w, h,", "ATOM_FEATURES = ['psnr'] def _generate_result(self, asset): # routine to call", "result.result_dict[vif_scores_key] = list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) /", "__license__ = \"Apache, Version 2.0\" import re import numpy as", "result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] = list(", 
"\"VMAF_feature\" # VERSION = '0.1' # vmaf_study; Anush's VIF fix", "read the feature scores from the log file, and return", "cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class SsimFeatureExtractor(FeatureExtractor): TYPE", "derived class of FeatureExtractor must: 1) Override TYPE and VERSION", "scores in a dictionary format. For an example, follow VmafFeatureExtractor.", "adm_num_scale0_scores_key = cls.get_scores_key('adm_num_scale0') adm_den_scale0_scores_key = cls.get_scores_key('adm_den_scale0') adm_num_scale1_scores_key = cls.get_scores_key('adm_num_scale1') adm_den_scale1_scores_key", "0 for atom_feature in self.ATOM_FEATURES[1:]: assert len_score == len(atom_feature_scores_dict[atom_feature]), \\", "as dis_yuv_reader: scores_mtx_list = [] i = 0 for dis_yuv", "4.0 ) # adm_scalei = adm_num_scalei / adm_den_scalei, i =", "(np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)", "feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1]) return feature_result @classmethod def _post_process_result(cls, result):", "# VERSION = '0.2.4' # Fix a bug in adm", "cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] =", "list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key]", "vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list( 
(np.array(result.result_dict[vifdiff_num_scale0_scores_key])", "cls.get_scores_key(feature) in result.result_dict return result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\"", "@classmethod def _post_process_result(cls, result): # override Executor._post_process_result result = super(VmafFeatureExtractor,", "of offset for input YUV, updated VIF corner case #", "= '0.2.4b' # Modify by adding ADM noise floor outside", "{} atom_feature_idx_dict = {} for atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] =", "on them, and return a list of corresponding results. A", "atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature] = 0 with open(log_file_path, 'rt') as", "with open(log_file_path, 'wt') as log_file: log_file.write(str(log_dict)) def _get_feature_scores(self, asset): #", "reading with option of offset for input YUV, updated VIF", "class PsnrFeatureExtractor(FeatureExtractor): TYPE = \"PSNR_feature\" VERSION = \"1.0\" ATOM_FEATURES =", "w, h, log_file_path, logger) class MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\" #", "= '0.2.4' # Fix a bug in adm feature passing", "VERSION = '0.2.2' # adm abs-->fabs, corrected border handling, uniform", "adm3 = \\ # (((adm_num_scale0 + ADM_SCALE_CONSTANT) / (adm_den_scale0 +", "self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature] = 0 with open(log_file_path, 'rt')", "ADM2_CONSTANT) / (adm_den + ADM2_CONSTANT) adm2_scores_key = cls.get_scores_key('adm2') adm_num_scores_key =", "= None with YuvReader(filepath=asset.dis_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as dis_yuv_reader: scores_mtx_list", "= list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) / 
np.array(result.result_dict[vif_den_scale1_scores_key]))", "and try again.\" feature_result = {} for atom_feature in self.ATOM_FEATURES:", "and dis_scores_mtx is not None log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx':", "log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'rt') as log_file: log_str =", "VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE = \"VifDiff_feature\" VERSION = '0.1' ATOM_FEATURES = ['vifdiff',", "i = 0 for ref_yuv in ref_yuv_reader: ref_y = ref_yuv[0]", "= 0 for dis_yuv in dis_yuv_reader: dis_y = dis_yuv[0] firstm", "ref_yuv_reader: scores_mtx_list = [] i = 0 for ref_yuv in", "= list( (np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key])", "dictionary format. For an example, follow VmafFeatureExtractor. \"\"\" __metaclass__ =", "VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\" # VERSION = '0.1' # vmaf_study;", "+ cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] = list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "\\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate for feature in cls.DERIVED_ATOM_FEATURES:", "dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'wt') as log_file: log_file.write(str(log_dict))", "0 for dis_yuv in dis_yuv_reader: dis_y = dis_yuv[0] firstm =", "\"\"\" __metaclass__ = ABCMeta @property @abstractmethod def ATOM_FEATURES(self): raise NotImplementedError", "ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type, ref_path, dis_path,", "= list(dis_scores_mtx[:, 1]) return feature_result 
@classmethod def _post_process_result(cls, result): #", "cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) +", "motion2 VERSION = '0.2.4c' # Modify by moving motion2 to", "= ['refvar', 'disvar', ] def _generate_result(self, asset): # routine to", "['ssim', 'ssim_l', 'ssim_c', 'ssim_s'] def _generate_result(self, asset): # routine to", "= dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 dis_scores_mtx", "super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei = vifdiff_num_scalei / vifdiff_den_scalei, i =", "4.0 vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list( ( (np.array(result.result_dict[vif_num_scale0_scores_key]) /", "vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key = cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key", "\"1.1\" # fix OPT_RANGE_PIXEL_OFFSET = 0 ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0',", "self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type,", "the log file. 
quality_w, quality_h = asset.quality_width_height ref_scores_mtx = None", "= '0.2.2b' # expose adm_den/num_scalex # VERSION = '0.2.3' #", "[secondm]))) i += 1 dis_scores_mtx = np.vstack(scores_mtx_list) assert ref_scores_mtx is", "ast from vmaf import ExternalProgramCaller, to_list from vmaf.config import VmafConfig,", "expose vif num/den of each scale # VERSION = '0.2.2'", "FeatureExtractor takes in a list of assets, and run feature", "in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class PsnrFeatureExtractor(FeatureExtractor):", "offset for input YUV, updated VIF corner case # VERSION", "dictionary format. log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict = {} atom_feature_idx_dict =", "log_file_path, logger) class MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\" # VERSION =", "adm_scale0_scores_key = cls.get_scores_key('adm_scale0') adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key", "'vif_num_scale2', 'vif_den_scale2', 'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2',", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) ) # adm3 =", "np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] = list( (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) ) result.result_dict[vif_scale3_scores_key]", "= ABCMeta @property @abstractmethod def ATOM_FEATURES(self): raise NotImplementedError def _read_result(self,", "adm_scale3_scores_key = cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) /", 
"result.result_dict[disvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate for feature", "Anush's VIF fix # VERSION = '0.2' # expose vif_num,", "@classmethod def _post_process_result(cls, result): # override Executor._post_process_result result = super(MomentFeatureExtractor,", "firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 ref_scores_mtx = np.vstack(scores_mtx_list) dis_scores_mtx", "+ ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) ) /", "= np.array(log_dict['dis_scores_mtx']) _, num_ref_features = ref_scores_mtx.shape assert num_ref_features == 2", "'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ] DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1', 'vif_scale2',", "executable and generate feature # scores in the log file.", "cls.get_scores_key('vif_scale1') vif_scale2_scores_key = cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list(", "scores in a dictionary format. log_file_path = self._get_log_file_path(asset) with open(log_file_path,", "results. 
A FeatureExtractor must specify a unique type and version", "\"\"\" FeatureExtractor takes in a list of assets, and run", "atom_feature_idx_dict[atom_feature] += 1 continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score !=", "attribute), so that the Result generated by it can be", "logger) @classmethod def _post_process_result(cls, result): # override Executor._post_process_result result =", "result = super(VifFrameDifferenceFeatureExtractor, cls)._post_process_result(result) # vifdiff_scalei = vifdiff_num_scalei / vifdiff_den_scalei,", "= list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate for feature", "executable and generate quality # scores in the log file.", "adm2 = # (adm_num + ADM2_CONSTANT) / (adm_den + ADM2_CONSTANT)", "result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) result.result_dict[vifdiff_scale3_scores_key] = list(", "+ ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key])", "assert num_ref_features == 2 # ref1st, ref2nd _, num_dis_features =", "= list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) )", "'ssim_c', 'ssim_s'] def _generate_result(self, asset): # routine to call the", "ListStats __copyright__ = \"Copyright 2016-2018, Netflix, Inc.\" __license__ = \"Apache,", "'vif_num_scale3', 'vif_den_scale3', 'adm_num_scale0', 'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3',", "'adm_num_scale3', 'adm_den_scale3', ] 
DERIVED_ATOM_FEATURES = ['vif_scale0', 'vif_scale1', 'vif_scale2', 'vif_scale3', 'vif2',", "vifdiff_scale0_scores_key = cls.get_scores_key('vifdiff_scale0') vifdiff_scale1_scores_key = cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key", "= \"1.0\" ATOM_FEATURES = ['psnr'] def _generate_result(self, asset): # routine", "= np.vstack(scores_mtx_list) assert ref_scores_mtx is not None and dis_scores_mtx is", "corresponding results. A FeatureExtractor must specify a unique type and", "log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset) with", "and version combination (by the TYPE and VERSION attribute), so", "= 0 ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1',", "(np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key])) ) result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key]))", "= list( ( ((np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT))", "\"VifDiff_feature\" VERSION = '0.1' ATOM_FEATURES = ['vifdiff', 'vifdiff_num', 'vifdiff_den', 'vifdiff_num_scale0',", "scale # VERSION = '0.2.2' # adm abs-->fabs, corrected border", "self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor):", "return feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\" # VERSION =", "np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _, num_ref_features = ref_scores_mtx.shape assert num_ref_features", 
"continue len_score = len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score != 0 for atom_feature", "cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) +", "= cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3') vif_scale0_scores_key = cls.get_scores_key('vif_scale0') vif_scale1_scores_key =", "] DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT =", "= \"1.1\" # python only ATOM_FEATURES = ['ref1st', 'ref2nd', 'dis1st',", "'adm_den_scale0', 'adm_num_scale1', 'adm_den_scale1', 'adm_num_scale2', 'adm_den_scale2', 'adm_num_scale3', 'adm_den_scale3', ] DERIVED_ATOM_FEATURES =", "a dictionary format. log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict = {} atom_feature_idx_dict", "- m[0] * m[0] result.result_dict[refvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key])))", "cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2') vif_num_scale3_scores_key = cls.get_scores_key('vif_num_scale3') vif_den_scale3_scores_key = cls.get_scores_key('vif_den_scale3')", "'vifdiff_den_scale2', 'vifdiff_num_scale3', 'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3',", "them, and return a list of corresponding results. 
A FeatureExtractor", "per coef # VERSION = '0.2.4' # Fix a bug", "vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key", "/ (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key]) +", "# routine to call the command-line executable and generate feature", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] = list(", "cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class PsnrFeatureExtractor(FeatureExtractor): TYPE", "NaN and inf val = float(mo.group(2)) if np.isnan(val) or np.isinf(val):", "'ssim_s'] def _generate_result(self, asset): # routine to call the command-line", "list( (np.array(result.result_dict[adm_num_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key]", "= list( (np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) )", "np.array(log_dict['dis_scores_mtx']) _, num_ref_features = ref_scores_mtx.shape assert num_ref_features == 2 #", "self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE =", "list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1])", "np.array(result.result_dict[vifdiff_den_scale2_scores_key])) ) 
result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) #", "1) Override TYPE and VERSION 2) Override _generate_result(self, asset), which", "cls.get_scores_key('vifdiff_scale1') vifdiff_scale2_scores_key = cls.get_scores_key('vifdiff_scale2') vifdiff_scale3_scores_key = cls.get_scores_key('vifdiff_scale3') result.result_dict[vifdiff_scale0_scores_key] = list(", "by adding ADM noise floor outside cube root; add derived", "+ (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key])) + (np.array(result.result_dict[vif_num_scale3_scores_key])", "(np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale1_scores_key] = list( (np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)", "+ (np.array(result.result_dict[vif_num_scale3_scores_key]) / np.array(result.result_dict[vif_den_scale3_scores_key])) ) / 4.0 ) # adm_scalei", "and generate quality # scores in the log file. 
quality_width,", "script and try again.\" feature_result = {} for atom_feature in", "DERIVED_ATOM_FEATURES = ['refvar', 'disvar', ] def _generate_result(self, asset): # routine", "is not None and dis_scores_mtx is not None log_dict =", "/ (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) ) / 4.0 ) # validate", "feature_result @classmethod def _post_process_result(cls, result): # override Executor._post_process_result result =", "refvar and disvar from ref1st, ref2nd, dis1st, dis2nd refvar_scores_key =", "class FeatureExtractor(Executor): \"\"\" FeatureExtractor takes in a list of assets,", "import re import numpy as np import ast from vmaf", "VIF corner case # VERSION = '0.2.2b' # expose adm_den/num_scalex", "list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:, 1]) return feature_result @classmethod def", "the command-line executable and generate feature # scores in the", "cls.get_scores_key('vif_scale2') vif_scale3_scores_key = cls.get_scores_key('vif_scale3') result.result_dict[vif_scale0_scores_key] = list( (np.array(result.result_dict[vif_num_scale0_scores_key]) / np.array(result.result_dict[vif_den_scale0_scores_key]))", "for dis_yuv in dis_yuv_reader: dis_y = dis_yuv[0] firstm = dis_y.mean()", "result.result_dict return result class SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\" # VERSION", "cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var = lambda", "\"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod def get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE,", "yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path,", 
"log_file: for line in log_file.readlines(): for atom_feature in self.ATOM_FEATURES: re_template", "YuvReader(filepath=asset.ref_workfile_path, width=quality_w, height=quality_h, yuv_type=self._get_workfile_yuv_type(asset)) as ref_yuv_reader: scores_mtx_list = [] i", "np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature)", "from the log file, and return # the scores in", "scores in the log file. quality_w, quality_h = asset.quality_width_height ref_scores_mtx", "log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger =", "type=cls.TYPE, atom_feature=atom_feature) @classmethod def get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature)", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale2_scores_key] = list(", "cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result class VifFrameDifferenceFeatureExtractor(FeatureExtractor): TYPE", "_, num_dis_features = dis_scores_mtx.shape assert num_dis_features == 2 # dis1st,", "= # ((vif_num_scale0 / vif_den_scale0) + (vif_num_scale1 / vif_den_scale1) +", "result.result_dict[vifdiff_scale3_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale3_scores_key]) / np.array(result.result_dict[vifdiff_den_scale3_scores_key])) ) # validate for", "the scores in a dictionary format. 
log_file_path = self._get_log_file_path(asset) with", "and disvar from ref1st, ref2nd, dis1st, dis2nd refvar_scores_key = cls.get_scores_key('refvar')", "= cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key] = list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key])", "so that the Result generated by it can be identified.", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) ) result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key])", "'ms_ssim_s_scale1', 'ms_ssim_l_scale2', 'ms_ssim_c_scale2', 'ms_ssim_s_scale2', 'ms_ssim_l_scale3', 'ms_ssim_c_scale3', 'ms_ssim_s_scale3', 'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4',", "Inc.\" __license__ = \"Apache, Version 2.0\" import re import numpy", "abstractmethod import os from vmaf.tools.misc import make_absolute_path, run_process from vmaf.tools.stats", "must specify a unique type and version combination (by the", "get_score_key(cls, atom_feature): return \"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self, asset): #", "c code ATOM_FEATURES = ['vif', 'adm', 'ansnr', 'motion', 'motion2', 'vif_num',", "= vifdiff_num_scalei / vifdiff_den_scalei, i = 0, 1, 2, 3", "disvar from ref1st, ref2nd, dis1st, dis2nd refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key", "0 ATOM_FEATURES = ['ms_ssim', 'ms_ssim_l_scale0', 'ms_ssim_c_scale0', 'ms_ssim_s_scale0', 'ms_ssim_l_scale1', 'ms_ssim_c_scale1', 'ms_ssim_s_scale1',", "folding noise floor into per coef # VERSION = '0.2.4'", "in a list of assets, and run feature extraction on", "quality_w, quality_h = asset.quality_width_height ref_scores_mtx = None with YuvReader(filepath=asset.ref_workfile_path, width=quality_w,", "= re.match(re_template, line) if mo: cur_idx = int(mo.group(1)) assert cur_idx", "/ 4.0 
vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list( ( (np.array(result.result_dict[vif_num_scale0_scores_key])", "feature_result class VmafFeatureExtractor(FeatureExtractor): TYPE = \"VMAF_feature\" # VERSION = '0.1'", "file. quality_w, quality_h = asset.quality_width_height ref_scores_mtx = None with YuvReader(filepath=asset.ref_workfile_path,", "+ ADM_SCALE_CONSTANT)) # + ((adm_num_scale2 + ADM_SCALE_CONSTANT) / (adm_den_scale2 +", "'ms_ssim_l_scale4', 'ms_ssim_c_scale4', 'ms_ssim_s_scale4', ] def _generate_result(self, asset): # routine to", "None and dis_scores_mtx is not None log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(),", "cls.ADM_SCALE_CONSTANT) ) # adm3 = \\ # (((adm_num_scale0 + ADM_SCALE_CONSTANT)", "call the command-line executable and generate quality # scores in", "ABCMeta @property @abstractmethod def ATOM_FEATURES(self): raise NotImplementedError def _read_result(self, asset):", "and generate feature scores in a log file. 3) Override", "+ ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key])", "FeatureExtractor(Executor): \"\"\" FeatureExtractor takes in a list of assets, and", "ref1st_scores_key = cls.get_scores_key('ref1st') ref2nd_scores_key = cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key", "\"{type}_{atom_feature}_score\".format( type=cls.TYPE, atom_feature=atom_feature) def _get_feature_scores(self, asset): # routine to read", "dis_scores_mtx = np.array(log_dict['dis_scores_mtx']) _, num_ref_features = ref_scores_mtx.shape assert num_ref_features ==", "asset), which read the feature scores from the log file,", "= cls.get_scores_key('dis1st') dis2nd_scores_key = cls.get_scores_key('dis2nd') get_var = lambda m: m[1]", "executor_id, result) @classmethod def get_scores_key(cls, 
atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature)", "= {} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')] = list(ref_scores_mtx[:, 1])", "MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\" # VERSION = \"1.0\" VERSION =", "Result generated by it can be identified. A derived class", "VERSION attribute), so that the Result generated by it can", "log_file.read() log_dict = ast.literal_eval(log_str) ref_scores_mtx = np.array(log_dict['ref_scores_mtx']) dis_scores_mtx = np.array(log_dict['dis_scores_mtx'])", "= [] i = 0 for ref_yuv in ref_yuv_reader: ref_y", "w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path, w, h,", "of FeatureExtractor must: 1) Override TYPE and VERSION 2) Override", "run feature extraction on them, and return a list of", "scores in the log file. quality_width, quality_height = asset.quality_width_height log_file_path", "vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key = cls.get_scores_key('vifdiff_num_scale1') vifdiff_den_scale1_scores_key", "'0.2.2' # adm abs-->fabs, corrected border handling, uniform reading with", "from vmaf.tools.reader import YuvReader class FeatureExtractor(Executor): \"\"\" FeatureExtractor takes in", "executable and generate feature scores in a log file. 
3)", "ref_yuv in ref_yuv_reader: ref_y = ref_yuv[0] firstm = ref_y.mean() secondm", "2, 3 vifdiff_num_scale0_scores_key = cls.get_scores_key('vifdiff_num_scale0') vifdiff_den_scale0_scores_key = cls.get_scores_key('vifdiff_den_scale0') vifdiff_num_scale1_scores_key =", "self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return feature_result class", "((adm_num_scale3 + ADM_SCALE_CONSTANT) / (adm_den_scale3 + ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key", "TYPE = \"VMAF_feature\" # VERSION = '0.1' # vmaf_study; Anush's", "\\ # (((adm_num_scale0 + ADM_SCALE_CONSTANT) / (adm_den_scale0 + ADM_SCALE_CONSTANT)) #", "result.result_dict[refvar_scores_key] = \\ to_list(map(get_var, zip(result.result_dict[ref1st_scores_key], result.result_dict[ref2nd_scores_key]))) result.result_dict[disvar_scores_key] = \\ to_list(map(get_var,", "+ ADM_SCALE_CONSTANT)) # + ((adm_num_scale1 + ADM_SCALE_CONSTANT) / (adm_den_scale1 +", "[] i = 0 for dis_yuv in dis_yuv_reader: dis_y =", "/ (np.array(result.result_dict[adm_den_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key])", "AVX for VMAF convolution; update adm features by folding noise", "ref1st, ref2nd, dis1st, dis2nd refvar_scores_key = cls.get_scores_key('refvar') ref1st_scores_key = cls.get_scores_key('ref1st')", "] DERIVED_ATOM_FEATURES = ['refvar', 'disvar', ] def _generate_result(self, asset): #", ") result.result_dict[vif_scale1_scores_key] = list( (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) ) result.result_dict[vif_scale2_scores_key] =", "None log_dict = {'ref_scores_mtx': ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset)", "scores from the log file, and return # the scores", 
"ref_yuv_reader: ref_y = ref_yuv[0] firstm = ref_y.mean() secondm = ref_y.var()", "+ ((adm_num_scale1 + ADM_SCALE_CONSTANT) / (adm_den_scale1 + ADM_SCALE_CONSTANT)) # +", "'0.2.4c' # Modify by moving motion2 to c code ATOM_FEATURES", "derived feature motion2 VERSION = '0.2.4c' # Modify by moving", "= list( (np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) )", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale2_scores_key]) + cls.ADM_SCALE_CONSTANT) /", "@classmethod def get_scores_key(cls, atom_feature): return \"{type}_{atom_feature}_scores\".format( type=cls.TYPE, atom_feature=atom_feature) @classmethod def", "of each scale # VERSION = '0.2.2' # adm abs-->fabs,", "in a dictionary format. log_file_path = self._get_log_file_path(asset) atom_feature_scores_dict = {}", "VERSION = '0.2.1' # expose vif num/den of each scale", "# (adm_num + ADM2_CONSTANT) / (adm_den + ADM2_CONSTANT) adm2_scores_key =", "ref_scores_mtx.tolist(), 'dis_scores_mtx': dis_scores_mtx.tolist()} log_file_path = self._get_log_file_path(asset) with open(log_file_path, 'wt') as", "VERSION = '0.2.2b' # expose adm_den/num_scalex # VERSION = '0.2.3'", "\"Apache, Version 2.0\" import re import numpy as np import", ") / 4.0 ) # validate for feature in cls.DERIVED_ATOM_FEATURES:", "# validate for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict", "cls.get_scores_key(feature) in result.result_dict return result class SsimFeatureExtractor(FeatureExtractor): TYPE = \"SSIM_feature\"", "ADM_SCALE_CONSTANT)) # + ((adm_num_scale3 + ADM_SCALE_CONSTANT) / (adm_den_scale3 + ADM_SCALE_CONSTANT)))", "'vifdiff_den_scale3', ] DERIVED_ATOM_FEATURES = ['vifdiff_scale0', 'vifdiff_scale1', 'vifdiff_scale2', 'vifdiff_scale3', ] ADM2_CONSTANT", "= \"{af}: ([0-9]+) 
([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template, line) if mo:", "dis_path, w, h, log_file_path, logger) class MsSsimFeatureExtractor(FeatureExtractor): TYPE = \"MS_SSIM_feature\"", "to_list(map(get_var, zip(result.result_dict[dis1st_scores_key], result.result_dict[dis2nd_scores_key]))) # validate for feature in cls.DERIVED_ATOM_FEATURES: assert", "cls.get_scores_key('adm_scale1') adm_scale2_scores_key = cls.get_scores_key('adm_scale2') adm_scale3_scores_key = cls.get_scores_key('adm_scale3') result.result_dict[adm_scale0_scores_key] = list(", "cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT)) ) / 4.0 ) #", "= cls.get_scores_key('ref2nd') disvar_scores_key = cls.get_scores_key('disvar') dis1st_scores_key = cls.get_scores_key('dis1st') dis2nd_scores_key =", "/ np.array(result.result_dict[vif_den_scale0_scores_key])) + (np.array(result.result_dict[vif_num_scale1_scores_key]) / np.array(result.result_dict[vif_den_scale1_scores_key])) + (np.array(result.result_dict[vif_num_scale2_scores_key]) / np.array(result.result_dict[vif_den_scale2_scores_key]))", "float(mo.group(2)) if np.isnan(val) or np.isinf(val): val = None atom_feature_scores_dict[atom_feature].append(val) atom_feature_idx_dict[atom_feature]", "if mo: cur_idx = int(mo.group(1)) assert cur_idx == atom_feature_idx_dict[atom_feature] #", "vif_den_scale0_scores_key = cls.get_scores_key('vif_den_scale0') vif_num_scale1_scores_key = cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key", "vmaf_study; Anush's VIF fix # VERSION = '0.2' # expose", "handling, uniform reading with option of offset for input YUV,", "VmafConfig, VmafExternalConfig from vmaf.core.executor import Executor from vmaf.core.result import Result", ") result.result_dict[adm_scale3_scores_key] = list( (np.array(result.result_dict[adm_num_scale3_scores_key]) + cls.ADM_SCALE_CONSTANT) / 
(np.array(result.result_dict[adm_den_scale3_scores_key]) +", "vif_den_scale3)) / 4.0 vif_scores_key = cls.get_scores_key('vif2') result.result_dict[vif_scores_key] = list( (", "dis_y.var() + firstm**2 scores_mtx_list.append(np.hstack(([firstm], [secondm]))) i += 1 dis_scores_mtx =", "# override Executor._post_process_result result = super(VmafFeatureExtractor, cls)._post_process_result(result) # adm2 =", "np import ast from vmaf import ExternalProgramCaller, to_list from vmaf.config", "= len(atom_feature_scores_dict[self.ATOM_FEATURES[0]]) assert len_score != 0 for atom_feature in self.ATOM_FEATURES[1:]:", "# vmaf_study; Anush's VIF fix # VERSION = '0.2' #", "= list(ref_scores_mtx[:, 1]) feature_result[self.get_scores_key('dis1st')] = list(dis_scores_mtx[:, 0]) feature_result[self.get_scores_key('dis2nd')] = list(dis_scores_mtx[:,", "1]) return feature_result @classmethod def _post_process_result(cls, result): # override Executor._post_process_result", "result): # override Executor._post_process_result result = super(MomentFeatureExtractor, cls)._post_process_result(result) # calculate", "and VERSION 2) Override _generate_result(self, asset), which call a command-line", "call a command-line executable and generate feature scores in a", "num_ref_features = ref_scores_mtx.shape assert num_ref_features == 2 # ref1st, ref2nd", "+ ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] = list(", "mo = re.match(re_template, line) if mo: cur_idx = int(mo.group(1)) assert", "'dis2nd', ] DERIVED_ATOM_FEATURES = ['refvar', 'disvar', ] def _generate_result(self, asset):", "(np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) ) # vif_scalei = vif_num_scalei / vif_den_scalei,", "adm2_scores_key = cls.get_scores_key('adm2') adm_num_scores_key = cls.get_scores_key('adm_num') adm_den_scores_key = cls.get_scores_key('adm_den') result.result_dict[adm2_scores_key]", "VERSION = \"1.0\" VERSION = \"1.1\" # fix 
OPT_RANGE_PIXEL_OFFSET =", "{} result.update(self._get_feature_scores(asset)) executor_id = self.executor_id return Result(asset, executor_id, result) @classmethod", "def ATOM_FEATURES(self): raise NotImplementedError def _read_result(self, asset): result = {}", "+ cls.ADM_SCALE_CONSTANT) / (np.array(result.result_dict[adm_den_scale0_scores_key]) + cls.ADM_SCALE_CONSTANT)) + ((np.array(result.result_dict[adm_num_scale1_scores_key]) + cls.ADM_SCALE_CONSTANT)", "YuvReader class FeatureExtractor(Executor): \"\"\" FeatureExtractor takes in a list of", "quality_height = asset.quality_width_height log_file_path = self._get_log_file_path(asset) yuv_type=self._get_workfile_yuv_type(asset) ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width", "can be identified. A derived class of FeatureExtractor must: 1)", "m: m[1] - m[0] * m[0] result.result_dict[refvar_scores_key] = \\ to_list(map(get_var,", "np.vstack(scores_mtx_list) assert ref_scores_mtx is not None and dis_scores_mtx is not", "log_file_path, logger) class MomentFeatureExtractor(FeatureExtractor): TYPE = \"Moment_feature\" # VERSION =", "dis1st, dis2nd feature_result = {} feature_result[self.get_scores_key('ref1st')] = list(ref_scores_mtx[:, 0]) feature_result[self.get_scores_key('ref2nd')]", "atom_feature in self.ATOM_FEATURES: scores_key = self.get_scores_key(atom_feature) feature_result[scores_key] = atom_feature_scores_dict[atom_feature] return", "adm feature passing scale into dwt_quant_step # VERSION = '0.2.4b'", "= ref_scores_mtx.shape assert num_ref_features == 2 # ref1st, ref2nd _,", "w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_psnr(yuv_type, ref_path, dis_path, w, h,", "cls.get_scores_key('vif_num_scale1') vif_den_scale1_scores_key = cls.get_scores_key('vif_den_scale1') vif_num_scale2_scores_key = cls.get_scores_key('vif_num_scale2') vif_den_scale2_scores_key = cls.get_scores_key('vif_den_scale2')", "ref_scores_mtx is not None and 
dis_scores_mtx is not None log_dict", "self.logger ExternalProgramCaller.call_vifdiff_feature(yuv_type, ref_path, dis_path, w, h, log_file_path, logger) @classmethod def", "for atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature] = 0", "command-line executable and generate feature scores in a log file.", "len(atom_feature_scores_dict[atom_feature]), \\ \"Feature data possibly corrupt. Run cleanup script and", "h=quality_height logger = self.logger ExternalProgramCaller.call_ms_ssim(yuv_type, ref_path, dis_path, w, h, log_file_path,", "adm_den, anpsnr # VERSION = '0.2.1' # expose vif num/den", "call the command-line executable and generate feature # scores in", "ADM_SCALE_CONSTANT)) # + ((adm_num_scale2 + ADM_SCALE_CONSTANT) / (adm_den_scale2 + ADM_SCALE_CONSTANT))", "= {} for atom_feature in self.ATOM_FEATURES: atom_feature_scores_dict[atom_feature] = [] atom_feature_idx_dict[atom_feature]", "(adm_den_scale3 + ADM_SCALE_CONSTANT))) / 4.0 adm3_scores_key = cls.get_scores_key('adm3') result.result_dict[adm3_scores_key] =", "ExternalProgramCaller, to_list from vmaf.config import VmafConfig, VmafExternalConfig from vmaf.core.executor import", "re_template = \"{af}: ([0-9]+) ([a-zA-Z0-9.-]+)\".format(af=atom_feature) mo = re.match(re_template, line) if", "ref_path=asset.ref_workfile_path dis_path=asset.dis_workfile_path w=quality_width h=quality_height logger = self.logger ExternalProgramCaller.call_vmaf_feature(yuv_type, ref_path, dis_path,", "format. For an example, follow VmafFeatureExtractor. 
\"\"\" __metaclass__ = ABCMeta", "(np.array(result.result_dict[adm_num_scores_key]) + cls.ADM2_CONSTANT) / (np.array(result.result_dict[adm_den_scores_key]) + cls.ADM2_CONSTANT) ) # vif_scalei", "(np.array(result.result_dict[vifdiff_num_scale1_scores_key]) / np.array(result.result_dict[vifdiff_den_scale1_scores_key])) ) result.result_dict[vifdiff_scale2_scores_key] = list( (np.array(result.result_dict[vifdiff_num_scale2_scores_key]) / np.array(result.result_dict[vifdiff_den_scale2_scores_key]))", "self._get_log_file_path(asset) with open(log_file_path, 'rt') as log_file: log_str = log_file.read() log_dict", "for feature in cls.DERIVED_ATOM_FEATURES: assert cls.get_scores_key(feature) in result.result_dict return result", "= cls.get_scores_key('adm_den_scale3') adm_scale0_scores_key = cls.get_scores_key('adm_scale0') adm_scale1_scores_key = cls.get_scores_key('adm_scale1') adm_scale2_scores_key =" ]
[]
[ "by susy at 2019/11/8 \"\"\" from dao.dao import DataDao import", "# -*- coding: utf-8 -*- \"\"\" Created by susy at", "-*- coding: utf-8 -*- \"\"\" Created by susy at 2019/11/8", "from dao.models import PanAccounts from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID class", "import pytz from dao.models import PanAccounts from cfg import PAN_SERVICE,", "utf-8 -*- \"\"\" Created by susy at 2019/11/8 \"\"\" from", "susy at 2019/11/8 \"\"\" from dao.dao import DataDao import pytz", "PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService: def __init__(self): self.default_tz = pytz.timezone('Asia/Chongqing') #", "import PanAccounts from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService: def", "import DataDao import pytz from dao.models import PanAccounts from cfg", "pytz from dao.models import PanAccounts from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID", "Created by susy at 2019/11/8 \"\"\" from dao.dao import DataDao", "dao.dao import DataDao import pytz from dao.models import PanAccounts from", "coding: utf-8 -*- \"\"\" Created by susy at 2019/11/8 \"\"\"", "at 2019/11/8 \"\"\" from dao.dao import DataDao import pytz from", "from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService: def __init__(self): self.default_tz", "BaseService: def __init__(self): self.default_tz = pytz.timezone('Asia/Chongqing') # self.pan_acc: PanAccounts =", "\"\"\" Created by susy at 2019/11/8 \"\"\" from dao.dao import", "PanAccounts from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService: def __init__(self):", "\"\"\" from dao.dao import DataDao import pytz from dao.models import", "DataDao import pytz from dao.models import PanAccounts from cfg import", "-*- \"\"\" Created by susy at 2019/11/8 \"\"\" from dao.dao", "MASTER_ACCOUNT_ID class BaseService: def __init__(self): self.default_tz = pytz.timezone('Asia/Chongqing') # self.pan_acc:", "def __init__(self): self.default_tz = pytz.timezone('Asia/Chongqing') # self.pan_acc: PanAccounts = 
DataDao.pan_account_list(MASTER_ACCOUNT_ID,", "__init__(self): self.default_tz = pytz.timezone('Asia/Chongqing') # self.pan_acc: PanAccounts = DataDao.pan_account_list(MASTER_ACCOUNT_ID, False)", "from dao.dao import DataDao import pytz from dao.models import PanAccounts", "2019/11/8 \"\"\" from dao.dao import DataDao import pytz from dao.models", "import PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService: def __init__(self): self.default_tz = pytz.timezone('Asia/Chongqing')", "cfg import PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService: def __init__(self): self.default_tz =", "dao.models import PanAccounts from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID class BaseService:", "class BaseService: def __init__(self): self.default_tz = pytz.timezone('Asia/Chongqing') # self.pan_acc: PanAccounts" ]
[ "value, mask=None): if mask is not None: mask = mask.unsqueeze(1)", "= nn.Dropout(p=dropout) def forward(self, query, key, value, mask=None): if mask", "nn.Linear(d_model, d_model) self.dropout = nn.Dropout(p=dropout) def forward(self, query, key, value,", "2) for l, x in zip(self.linear_layers, (query, key, value))] #", "key, value = [l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1, 2) for l,", "d_model) self.dropout = nn.Dropout(p=dropout) def forward(self, query, key, value, mask=None):", "projected vectors in batch. x, attn = attention(query, key, value,", "d_model) for _ in range(3)]) self.output_linear = nn.Linear(d_model, d_model) self.dropout", "d_model % nhead ==0 # we assume d_v always equal", "=> h x d_k query, key, value = [l(x).view(batch_size, -1,", "# 2) Apply attention on all the projected vectors in", "range(3)]) self.output_linear = nn.Linear(d_model, d_model) self.dropout = nn.Dropout(p=dropout) def forward(self,", "self.output_linear = nn.Linear(d_model, d_model) self.dropout = nn.Dropout(p=dropout) def forward(self, query,", "zip(self.linear_layers, (query, key, value))] # 2) Apply attention on all", "__init__(self, d_model, nhead, dropout=0.1): super().__init__() assert d_model % nhead ==0", "def __init__(self, d_model, nhead, dropout=0.1): super().__init__() assert d_model % nhead", "view and apply a final linear. x = x.transpose(1, 2).contiguous().view(batch_size,", "key, value))] # 2) Apply attention on all the projected", "import attention class MultiHeadedAttention(nn.Module): def __init__(self, d_model, nhead, dropout=0.1): super().__init__()", "1) Do all the linear projections in batch from d_model", "super().__init__() assert d_model % nhead ==0 # we assume d_v", "vectors in batch. x, attn = attention(query, key, value, mask=mask,", "# 3) \"Concat\" using a view and apply a final", "final linear. 
x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead * self.d_k)", "nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)]) self.output_linear", "d_model // nhead self.nhead = nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model)", "we assume d_v always equal d_k self.d_k = d_model //", "(query, key, value))] # 2) Apply attention on all the", "batch from d_model => h x d_k query, key, value", "python3 # -*- coding: utf-8 -*- import torch.nn as nn", "apply a final linear. x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead", "dropout=0.1): super().__init__() assert d_model % nhead ==0 # we assume", "nhead, dropout=0.1): super().__init__() assert d_model % nhead ==0 # we", "Apply attention on all the projected vectors in batch. x,", "mask=mask, dropout=self.dropout) # 3) \"Concat\" using a view and apply", "if mask is not None: mask = mask.unsqueeze(1) batch_size =", "= attention(query, key, value, mask=mask, dropout=self.dropout) # 3) \"Concat\" using", "nhead self.nhead = nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _", "equal d_k self.d_k = d_model // nhead self.nhead = nhead", "nn.Dropout(p=dropout) def forward(self, query, key, value, mask=None): if mask is", "nn from .single import attention class MultiHeadedAttention(nn.Module): def __init__(self, d_model,", "d_model, nhead, dropout=0.1): super().__init__() assert d_model % nhead ==0 #", "linear projections in batch from d_model => h x d_k", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- import torch.nn as", "l, x in zip(self.linear_layers, (query, key, value))] # 2) Apply", "-*- import torch.nn as nn from .single import attention class", "the linear projections in batch from d_model => h x", "None: mask = mask.unsqueeze(1) batch_size = query.size(0) # 1) Do", "x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead * self.d_k) context = self.output_linear(x) return", "from d_model => h x d_k 
query, key, value =", "d_model => h x d_k query, key, value = [l(x).view(batch_size,", "all the projected vectors in batch. x, attn = attention(query,", "from .single import attention class MultiHeadedAttention(nn.Module): def __init__(self, d_model, nhead,", "coding: utf-8 -*- import torch.nn as nn from .single import", "\"Concat\" using a view and apply a final linear. x", "mask=None): if mask is not None: mask = mask.unsqueeze(1) batch_size", "# we assume d_v always equal d_k self.d_k = d_model", "= [l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1, 2) for l, x in", "mask is not None: mask = mask.unsqueeze(1) batch_size = query.size(0)", "as nn from .single import attention class MultiHeadedAttention(nn.Module): def __init__(self,", "self.nhead = nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _ in", "h x d_k query, key, value = [l(x).view(batch_size, -1, self.nhead,", "import torch.nn as nn from .single import attention class MultiHeadedAttention(nn.Module):", "in zip(self.linear_layers, (query, key, value))] # 2) Apply attention on", "= nn.Linear(d_model, d_model) self.dropout = nn.Dropout(p=dropout) def forward(self, query, key,", "for _ in range(3)]) self.output_linear = nn.Linear(d_model, d_model) self.dropout =", "assume d_v always equal d_k self.d_k = d_model // nhead", "= x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead * self.d_k) context = self.output_linear(x)", "[l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1, 2) for l, x in zip(self.linear_layers,", "3) \"Concat\" using a view and apply a final linear.", "attention on all the projected vectors in batch. 
x, attn", "value = [l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1, 2) for l, x", "x d_k query, key, value = [l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1,", "for l, x in zip(self.linear_layers, (query, key, value))] # 2)", "= nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)]) self.output_linear = nn.Linear(d_model,", "class MultiHeadedAttention(nn.Module): def __init__(self, d_model, nhead, dropout=0.1): super().__init__() assert d_model", "utf-8 -*- import torch.nn as nn from .single import attention", "a final linear. x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead *", "using a view and apply a final linear. x =", "torch.nn as nn from .single import attention class MultiHeadedAttention(nn.Module): def", "linear. x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead * self.d_k) context", ".single import attention class MultiHeadedAttention(nn.Module): def __init__(self, d_model, nhead, dropout=0.1):", "= query.size(0) # 1) Do all the linear projections in", "projections in batch from d_model => h x d_k query,", "MultiHeadedAttention(nn.Module): def __init__(self, d_model, nhead, dropout=0.1): super().__init__() assert d_model %", "batch. 
x, attn = attention(query, key, value, mask=mask, dropout=self.dropout) #", "always equal d_k self.d_k = d_model // nhead self.nhead =", "key, value, mask=mask, dropout=self.dropout) # 3) \"Concat\" using a view", "x = x.transpose(1, 2).contiguous().view(batch_size, -1, self.nhead * self.d_k) context =", "self.dropout = nn.Dropout(p=dropout) def forward(self, query, key, value, mask=None): if", "2) Apply attention on all the projected vectors in batch.", "d_k self.d_k = d_model // nhead self.nhead = nhead self.linear_layers", "self.d_k).transpose(1, 2) for l, x in zip(self.linear_layers, (query, key, value))]", "query, key, value = [l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1, 2) for", "dropout=self.dropout) # 3) \"Concat\" using a view and apply a", "all the linear projections in batch from d_model => h", "self.d_k = d_model // nhead self.nhead = nhead self.linear_layers =", "x, attn = attention(query, key, value, mask=mask, dropout=self.dropout) # 3)", "attn = attention(query, key, value, mask=mask, dropout=self.dropout) # 3) \"Concat\"", "mask.unsqueeze(1) batch_size = query.size(0) # 1) Do all the linear", "nhead ==0 # we assume d_v always equal d_k self.d_k", "batch_size = query.size(0) # 1) Do all the linear projections", "-1, self.nhead, self.d_k).transpose(1, 2) for l, x in zip(self.linear_layers, (query,", "self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)]) self.output_linear =", "mask = mask.unsqueeze(1) batch_size = query.size(0) # 1) Do all", "is not None: mask = mask.unsqueeze(1) batch_size = query.size(0) #", "on all the projected vectors in batch. 
x, attn =", "nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)]) self.output_linear = nn.Linear(d_model, d_model)", "d_k query, key, value = [l(x).view(batch_size, -1, self.nhead, self.d_k).transpose(1, 2)", "not None: mask = mask.unsqueeze(1) batch_size = query.size(0) # 1)", "value, mask=mask, dropout=self.dropout) # 3) \"Concat\" using a view and", "= d_model // nhead self.nhead = nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model,", "query, key, value, mask=None): if mask is not None: mask", "def forward(self, query, key, value, mask=None): if mask is not", "<reponame>StateOfTheArt-quant/transformerquant #!/usr/bin/env python3 # -*- coding: utf-8 -*- import torch.nn", "attention class MultiHeadedAttention(nn.Module): def __init__(self, d_model, nhead, dropout=0.1): super().__init__() assert", "forward(self, query, key, value, mask=None): if mask is not None:", "_ in range(3)]) self.output_linear = nn.Linear(d_model, d_model) self.dropout = nn.Dropout(p=dropout)", "# 1) Do all the linear projections in batch from", "self.nhead, self.d_k).transpose(1, 2) for l, x in zip(self.linear_layers, (query, key,", "% nhead ==0 # we assume d_v always equal d_k", "==0 # we assume d_v always equal d_k self.d_k =", "x in zip(self.linear_layers, (query, key, value))] # 2) Apply attention", "Do all the linear projections in batch from d_model =>", "attention(query, key, value, mask=mask, dropout=self.dropout) # 3) \"Concat\" using a", "and apply a final linear. x = x.transpose(1, 2).contiguous().view(batch_size, -1,", "= nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)])", "= mask.unsqueeze(1) batch_size = query.size(0) # 1) Do all the", "a view and apply a final linear. 
x = x.transpose(1,", "key, value, mask=None): if mask is not None: mask =", "in range(3)]) self.output_linear = nn.Linear(d_model, d_model) self.dropout = nn.Dropout(p=dropout) def", "// nhead self.nhead = nhead self.linear_layers = nn.ModuleList([nn.Linear(d_model, d_model) for", "-*- coding: utf-8 -*- import torch.nn as nn from .single", "in batch from d_model => h x d_k query, key,", "-1, self.nhead * self.d_k) context = self.output_linear(x) return context#, attn", "2).contiguous().view(batch_size, -1, self.nhead * self.d_k) context = self.output_linear(x) return context#,", "assert d_model % nhead ==0 # we assume d_v always", "value))] # 2) Apply attention on all the projected vectors", "in batch. x, attn = attention(query, key, value, mask=mask, dropout=self.dropout)", "query.size(0) # 1) Do all the linear projections in batch", "d_v always equal d_k self.d_k = d_model // nhead self.nhead", "the projected vectors in batch. x, attn = attention(query, key,", "# -*- coding: utf-8 -*- import torch.nn as nn from" ]
[ "strategy == \"mh\": print(\"**** \", str(system), \" Job \", str(job),", "help='Suffix (chosen epoch, e.g. 1981)', required=True) parser.add_argument('-j', '--job', help='Job (0/1)',", "elif strategy == \"naive\": print(\"**** \", str(system), \" Job \",", "= argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which system (e.g. pb_system_5_3)', required=True) parser.add_argument('-sfx',", "WORK_PATH = os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2, unique=False): traces = []", "pm4py.objects.log.exporter.xes import factory as xes_exporter from pm4py.objects.log.importer.xes import factory as", "strategy + \"_generalization.csv\") xes_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system +", "= csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1) \"\"\" PERFORM MEASUREMENT ON PN", "+ \"_\" + strategy + \"_generalization.xes\") pn_file = os.path.join(DATA_PATH, \"pns\",", "+ \"_\" + \",\" + str(case) + \",\" + str(dt_object)", "traces: for event in trace: timestamp = timestamp + 1", "entry in lst: outfile.write(str(entry) + \"\\n\") def convertToCsv(traces, to_path): lines", "csv_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\" +", "in file_contents: if unique: if row not in traces: traces.append(row)", "+ \"_\" + strategy + \".txt\") csv_file = os.path.join(DATA_PATH, \"avatar\",", "\"variants\", system + \"_train.txt\") gen_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\",", "'w') as outfile: for entry in lst: outfile.write(str(entry) + \"\\n\")", "file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of train traces", "args.job pn = args.pn strategy = args.strategy if DATA_PATH is", "= datetime.fromtimestamp(timestamp) line = str(event) + \"_\" + \",\" +", "unique: if row not in traces: traces.append(row) else: skipped +=", "trace.split(\" \") for i in t: if i != \"\"", 
"str(len(file_contents))) for row in file_contents: if unique: if row not", "[] t = trace.split(\" \") for i in t: if", "+= 1 else: traces.append(row) with open(f_name2) as file: file_contents =", "suffix \", str(suffix), \" ***\") else: raise ValueError(\"Unknown strategy.\") print(\"AVATAR", "xes_exporter from pm4py.objects.log.importer.xes import factory as xes_importer from pm4py.objects.petri.importer import", "file_contents: if unique: if row not in traces: traces.append(row) else:", "if row not in traces: traces.append(row) else: skipped += 1", "= 0 with open(f_name1) as file: file_contents = file.read() file_contents", "= [] for trace in traces: f_trace = [] t", "+ \"_\" + strategy + \"_generalization.csv\") xes_file = os.path.join(WORK_PATH, \"data\",", "line = str(event) + \"_\" + \",\" + str(case) +", "import factory as precision_factory from conf.settings import DATA_PATH WORK_PATH =", "skipped += 1 else: traces.append(row) f_traces = [] for trace", "system + \"_train.txt\") gen_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system", "> 0: f_traces.append(f_trace) print(\"Number of traces are:\", str(len(f_traces))) print(\"Number of", "outfile: for entry in lst: outfile.write(str(entry) + \"\\n\") def convertToCsv(traces,", "log = xes_importer.import_log(xes_file) net, initial_marking, final_marking = pnml_importer.import_net(pn_file) fitness =", "lines = [] case = 0 timestamp = 0 line", "as xes_exporter from pm4py.objects.log.importer.xes import factory as xes_importer from pm4py.objects.petri.importer", "open(file, 'w') as outfile: for entry in lst: outfile.write(str(entry) +", "str(event) + \"_\" + \",\" + str(case) + \",\" +", "+ \"_\" + strategy + \"_generalization.xes\") pn_file = os.path.join(WORK_PATH, \"data\",", "for entry in lst: outfile.write(str(entry) + \"\\n\") def convertToCsv(traces, to_path):", "= pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log, net, initial_marking, final_marking) 
print(\"Fitness=\", fitness)", "in t: if i != \"\" and \"<\" not in", "\"_\" + strategy + \"_generalization.csv\") xes_file = os.path.join(WORK_PATH, \"data\", \"avatar\",", "+ \"_\" + strategy + \"_generalization.csv\") xes_file = os.path.join(DATA_PATH, \"avatar\",", "\", str(suffix),\" ***\") elif strategy == \"naive\": print(\"**** \", str(system),", "skipped = 0 with open(f_name1) as file: file_contents = file.read()", "traces.append(row) f_traces = [] for trace in traces: f_trace =", "= timestamp + 1 dt_object = datetime.fromtimestamp(timestamp) line = str(event)", "+ \"_\" + strategy + \".txt\") csv_file = os.path.join(WORK_PATH, \"data\",", "str(len(f_traces))) print(\"Number of skipped traces are:\", str(skipped)) return f_traces def", "skipped traces are:\", str(skipped)) return f_traces def writeToFile(file, lst): with", "\",\" + str(case) + \",\" + str(dt_object) lines.append(line) case =", "initial_marking, final_marking) print(\"Fitness=\", fitness) precision = precision_factory.apply(log, net, initial_marking, final_marking)", "gen_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\" +", "\"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace in traces: for event in trace:", "+ \"_j\" + str(job) + \"_\" + strategy + \"_generalization.xes\")", "epoch, e.g. 
1981)', required=True) parser.add_argument('-j', '--job', help='Job (0/1)', required=True) parser.add_argument('-pn',", "Job \", str(job), \" on PN \", str(pn_file), \" using", "system + \"_relgan_\" + str(suffix) + \"_j\" + str(job) +", "(fitness + precision)) if strategy == \"mh\": print(\"**** \", str(system),", "xes_file) time.sleep(1) \"\"\" PERFORM MEASUREMENT ON PN AND XES\"\"\" log", "= os.path.join(DATA_PATH, \"variants\", system + \"_train.txt\") gen_file = os.path.join(DATA_PATH, \"avatar\",", "***\") elif strategy == \"naive\": print(\"**** \", str(system), \" Job", "os.path.join(DATA_PATH, \"variants\", system + \"_train.txt\") gen_file = os.path.join(DATA_PATH, \"avatar\", \"variants\",", "\"_\" + \",\" + str(case) + \",\" + str(dt_object) lines.append(line)", "trace in traces: for event in trace: timestamp = timestamp", "f_name2, unique=False): traces = [] skipped = 0 with open(f_name1)", "strategy == \"naive\": print(\"**** \", str(system), \" Job \", str(job),", "\") for i in t: if i != \"\" and", "event in trace: timestamp = timestamp + 1 dt_object =", "if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which", "= os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2, unique=False): traces = [] skipped", "os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix) +", "in trace: timestamp = timestamp + 1 dt_object = datetime.fromtimestamp(timestamp)", "int(args.suffix) job = args.job pn = args.pn strategy = args.strategy", "lst): with open(file, 'w') as outfile: for entry in lst:", "(e.g. pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix', help='Suffix (chosen epoch, e.g. 
1981)',", "\"\\n\") def convertToCsv(traces, to_path): lines = [] case = 0", "f_trace.append(i) if len(f_trace) > 0: f_traces.append(f_trace) print(\"Number of traces are:\",", "!= \"\" and \"<\" not in i: f_trace.append(i) if len(f_trace)", "\"\"\" READ FILES AND CONVERT TO XES \"\"\" traces =", "in lst: outfile.write(str(entry) + \"\\n\") def convertToCsv(traces, to_path): lines =", "= os.path.join(WORK_PATH, \"data\", \"variants\", system + \"_train.txt\") gen_file = os.path.join(WORK_PATH,", "\"\"\" traces = readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log =", "not in i: f_trace.append(i) if len(f_trace) > 0: f_traces.append(f_trace) print(\"Number", "* precision) / (fitness + precision)) if strategy == \"mh\":", "in traces: for event in trace: timestamp = timestamp +", "1 else: traces.append(row) f_traces = [] for trace in traces:", "+ \"_train.txt\") gen_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system +", "time.sleep(1) \"\"\" PERFORM MEASUREMENT ON PN AND XES\"\"\" log =", "of generated traces are:\", str(len(file_contents))) for row in file_contents: if", "pn = args.pn strategy = args.strategy if DATA_PATH is None:", "\" using MH SAMPLING on suffix \", str(suffix),\" ***\") elif", "parser.add_argument('-pn', '--pn', help='Petri net file to evaluate', required=True) parser.add_argument('-strategy', '--strategy',", "= os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix)", "datetime from pm4py.objects.log.importer.csv import factory as csv_importer from pm4py.objects.log.exporter.xes import", "from conf.settings import DATA_PATH WORK_PATH = os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2,", "of train traces are:\", str(len(file_contents))) for row in file_contents: if", "pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log, net, initial_marking, final_marking) print(\"Fitness=\", fitness) precision", 
"str(job), \" on PN \", str(pn_file), \" using NAIVE SAMPLING", "str(case) + \",\" + str(dt_object) lines.append(line) case = case +", "SAMPLING on suffix \", str(suffix), \" ***\") else: raise ValueError(\"Unknown", "pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix', help='Suffix (chosen epoch, e.g. 1981)', required=True)", "as csv_importer from pm4py.objects.log.exporter.xes import factory as xes_exporter from pm4py.objects.log.importer.xes", "\", str(job), \" on PN \", str(pn_file), \" using MH", "AND CONVERT TO XES \"\"\" traces = readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces,", "\"_\" + strategy + \"_generalization.xes\") pn_file = os.path.join(DATA_PATH, \"pns\", system,", "line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace in traces: for event", "row in file_contents: if unique: if row not in traces:", "\" using NAIVE SAMPLING on suffix \", str(suffix), \" ***\")", "generated traces are:\", str(len(file_contents))) for row in file_contents: if unique:", "trace in traces: f_trace = [] t = trace.split(\" \")", "\", str(suffix), \" ***\") else: raise ValueError(\"Unknown strategy.\") print(\"AVATAR Generalization=\",", "log = csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1) \"\"\" PERFORM MEASUREMENT ON", "PN AND XES\"\"\" log = xes_importer.import_log(xes_file) net, initial_marking, final_marking =", "xes_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix)", "\"<\" not in i: f_trace.append(i) if len(f_trace) > 0: f_traces.append(f_trace)", "str(dt_object) lines.append(line) case = case + 1 writeToFile(str(to_path), lines) if", "if len(f_trace) > 0: f_traces.append(f_trace) print(\"Number of traces are:\", str(len(f_traces)))", "= os.path.join(WORK_PATH, \"data\", \"pns\", system, pn) else: train_file = os.path.join(DATA_PATH,", "import datetime from pm4py.objects.log.importer.csv import factory as 
csv_importer from pm4py.objects.log.exporter.xes", "parser.add_argument('-s', '--system', help='Which system (e.g. pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix', help='Suffix", "(0/1)', required=True) parser.add_argument('-pn', '--pn', help='Petri net file to evaluate', required=True)", "xes_importer from pm4py.objects.petri.importer import pnml as pnml_importer from pm4py.evaluation.replay_fitness import", "e.g. 1981)', required=True) parser.add_argument('-j', '--job', help='Job (0/1)', required=True) parser.add_argument('-pn', '--pn',", "\"_\" + strategy + \".txt\") csv_file = os.path.join(DATA_PATH, \"avatar\", \"variants\",", "required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True) args = parser.parse_args() system =", "as file: file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of", "def readFile(f_name1, f_name2, unique=False): traces = [] skipped = 0", "str(job) + \"_\" + strategy + \"_generalization.csv\") xes_file = os.path.join(WORK_PATH,", "PN \", str(pn_file), \" using MH SAMPLING on suffix \",", "MH SAMPLING on suffix \", str(suffix),\" ***\") elif strategy ==", "= readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log = csv_importer.import_event_log(csv_file) xes_exporter.export_log(log,", "FILES AND CONVERT TO XES \"\"\" traces = readFile(train_file,gen_file, unique=True)", "\"_j\" + str(job) + \"_\" + strategy + \"_generalization.xes\") pn_file", "= replay_factory.apply(log, net, initial_marking, final_marking) print(\"Fitness=\", fitness) precision = precision_factory.apply(log,", "\"data\", \"pns\", system, pn) else: train_file = os.path.join(DATA_PATH, \"variants\", system", "len(f_trace) > 0: f_traces.append(f_trace) print(\"Number of traces are:\", str(len(f_traces))) print(\"Number", "as replay_factory from pm4py.evaluation.precision import factory as precision_factory from conf.settings", "= 
xes_importer.import_log(xes_file) net, initial_marking, final_marking = pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log,", "from datetime import datetime from pm4py.objects.log.importer.csv import factory as csv_importer", "with open(f_name1) as file: file_contents = file.read() file_contents = file_contents.split(\"\\n\")", "+ 1 writeToFile(str(to_path), lines) if __name__ == \"__main__\": parser =", "+ \".txt\") csv_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\"", "to evaluate', required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True) args = parser.parse_args()", "initial_marking, final_marking = pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log, net, initial_marking, final_marking)", "+ \",\" + str(case) + \",\" + str(dt_object) lines.append(line) case", "\".txt\") csv_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\"", "print(\"Precision=\", precision) fitness = fitness[\"log_fitness\"] generalization = 2 * ((fitness", "final_marking = pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log, net, initial_marking, final_marking) print(\"Fitness=\",", "\", str(job), \" on PN \", str(pn_file), \" using NAIVE", "* ((fitness * precision) / (fitness + precision)) if strategy", "\"avatar\", \"variants\", system + \"_relgan_\" + str(suffix) + \"_j\" +", "+ strategy + \"_generalization.csv\") xes_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\",", "MEASUREMENT ON PN AND XES\"\"\" log = xes_importer.import_log(xes_file) net, initial_marking,", "traces.append(row) with open(f_name2) as file: file_contents = file.read() file_contents =", "traces: f_trace = [] t = trace.split(\" \") for i", "\"_generalization.xes\") pn_file = os.path.join(WORK_PATH, \"data\", \"pns\", system, pn) else: train_file", "on PN \", str(pn_file), \" using MH SAMPLING on suffix", "xes_exporter.export_log(log, xes_file) 
time.sleep(1) \"\"\" PERFORM MEASUREMENT ON PN AND XES\"\"\"", "XES\"\"\" log = xes_importer.import_log(xes_file) net, initial_marking, final_marking = pnml_importer.import_net(pn_file) fitness", "1981)', required=True) parser.add_argument('-j', '--job', help='Job (0/1)', required=True) parser.add_argument('-pn', '--pn', help='Petri", "+ str(suffix) + \"_j\" + str(job) + \"_\" + strategy", "\".txt\") csv_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" +", "writeToFile(file, lst): with open(file, 'w') as outfile: for entry in", "system (e.g. pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix', help='Suffix (chosen epoch, e.g.", "factory as xes_exporter from pm4py.objects.log.importer.xes import factory as xes_importer from", "from pm4py.evaluation.replay_fitness import factory as replay_factory from pm4py.evaluation.precision import factory", "traces are:\", str(len(file_contents))) for row in file_contents: if unique: if", "file to evaluate', required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True) args =", "for row in file_contents: if unique: if row not in", "None: train_file = os.path.join(WORK_PATH, \"data\", \"variants\", system + \"_train.txt\") gen_file", "print(\"Number of train traces are:\", str(len(file_contents))) for row in file_contents:", "conf.settings import DATA_PATH WORK_PATH = os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2, unique=False):", "system, pn) else: train_file = os.path.join(DATA_PATH, \"variants\", system + \"_train.txt\")", "'--job', help='Job (0/1)', required=True) parser.add_argument('-pn', '--pn', help='Petri net file to", "= os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix) +", "\" Job \", str(job), \" on PN \", str(pn_file), \"", "i: f_trace.append(i) if len(f_trace) > 0: f_traces.append(f_trace) print(\"Number of traces", "AND XES\"\"\" log = xes_importer.import_log(xes_file) net, 
initial_marking, final_marking = pnml_importer.import_net(pn_file)", "\", str(pn_file), \" using NAIVE SAMPLING on suffix \", str(suffix),", "1 else: traces.append(row) with open(f_name2) as file: file_contents = file.read()", "final_marking) print(\"Fitness=\", fitness) precision = precision_factory.apply(log, net, initial_marking, final_marking) print(\"Precision=\",", "NAIVE SAMPLING on suffix \", str(suffix), \" ***\") else: raise", "\"_\" + strategy + \"_generalization.csv\") xes_file = os.path.join(DATA_PATH, \"avatar\", \"variants\",", "\"_\" + strategy + \"_generalization.xes\") pn_file = os.path.join(WORK_PATH, \"data\", \"pns\",", "\"_train.txt\") gen_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" +", "args.strategy if DATA_PATH is None: train_file = os.path.join(WORK_PATH, \"data\", \"variants\",", "generalization = 2 * ((fitness * precision) / (fitness +", "import DATA_PATH WORK_PATH = os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2, unique=False): traces", "'--suffix', help='Suffix (chosen epoch, e.g. 
1981)', required=True) parser.add_argument('-j', '--job', help='Job", "file_contents = file_contents.split(\"\\n\") print(\"Number of train traces are:\", str(len(file_contents))) for", "required=True) parser.add_argument('-pn', '--pn', help='Petri net file to evaluate', required=True) parser.add_argument('-strategy',", "for trace in traces: f_trace = [] t = trace.split(\"", "str(skipped)) return f_traces def writeToFile(file, lst): with open(file, 'w') as", "dt_object = datetime.fromtimestamp(timestamp) line = str(event) + \"_\" + \",\"", "traces are:\", str(len(f_traces))) print(\"Number of skipped traces are:\", str(skipped)) return", "open(f_name2) as file: file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number", "traces = readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log = csv_importer.import_event_log(csv_file)", "print(\"**** \", str(system), \" Job \", str(job), \" on PN", "\"_generalization.xes\") pn_file = os.path.join(DATA_PATH, \"pns\", system, pn) \"\"\" READ FILES", "= parser.parse_args() system = args.system suffix = int(args.suffix) job =", "str(job) + \"_\" + strategy + \"_generalization.csv\") xes_file = os.path.join(DATA_PATH,", "precision) / (fitness + precision)) if strategy == \"mh\": print(\"****", "lines.append(line) for trace in traces: for event in trace: timestamp", "+ strategy + \".txt\") csv_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system", "os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2, unique=False): traces = [] skipped =", "csv_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix)", "= file_contents.split(\"\\n\") print(\"Number of train traces are:\", str(len(file_contents))) for row", "PERFORM MEASUREMENT ON PN AND XES\"\"\" log = xes_importer.import_log(xes_file) net,", "i in t: if i != \"\" and \"<\" not", "\"_relgan_\" + str(suffix) + \"_j\" + str(job) + \"_\" +", 
"outfile.write(str(entry) + \"\\n\") def convertToCsv(traces, to_path): lines = [] case", "str(job) + \"_\" + strategy + \"_generalization.xes\") pn_file = os.path.join(DATA_PATH,", "help='Job (0/1)', required=True) parser.add_argument('-pn', '--pn', help='Petri net file to evaluate',", "= precision_factory.apply(log, net, initial_marking, final_marking) print(\"Precision=\", precision) fitness = fitness[\"log_fitness\"]", "trace: timestamp = timestamp + 1 dt_object = datetime.fromtimestamp(timestamp) line", "xes_importer.import_log(xes_file) net, initial_marking, final_marking = pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log, net,", "+ str(job) + \"_\" + strategy + \"_generalization.csv\") xes_file =", "fitness = replay_factory.apply(log, net, initial_marking, final_marking) print(\"Fitness=\", fitness) precision =", "== \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which system (e.g.", "t: if i != \"\" and \"<\" not in i:", "\", str(pn_file), \" using MH SAMPLING on suffix \", str(suffix),\"", "net, initial_marking, final_marking) print(\"Precision=\", precision) fitness = fitness[\"log_fitness\"] generalization =", "ON PN AND XES\"\"\" log = xes_importer.import_log(xes_file) net, initial_marking, final_marking", "= case + 1 writeToFile(str(to_path), lines) if __name__ == \"__main__\":", "\"naive\": print(\"**** \", str(system), \" Job \", str(job), \" on", "parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True) args = parser.parse_args() system = args.system", "with open(file, 'w') as outfile: for entry in lst: outfile.write(str(entry)", "file_contents = file_contents.split(\"\\n\") print(\"Number of generated traces are:\", str(len(file_contents))) for", "+ \"_j\" + str(job) + \"_\" + strategy + \".txt\")", "parser = argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which system (e.g. 
pb_system_5_3)', required=True)", "open(f_name1) as file: file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number", "required=True) parser.add_argument('-j', '--job', help='Job (0/1)', required=True) parser.add_argument('-pn', '--pn', help='Petri net", "= args.strategy if DATA_PATH is None: train_file = os.path.join(WORK_PATH, \"data\",", "+ 1 dt_object = datetime.fromtimestamp(timestamp) line = str(event) + \"_\"", "+ \"_generalization.csv\") xes_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\"", "import factory as xes_exporter from pm4py.objects.log.importer.xes import factory as xes_importer", "net, initial_marking, final_marking) print(\"Fitness=\", fitness) precision = precision_factory.apply(log, net, initial_marking,", "strategy + \"_generalization.xes\") pn_file = os.path.join(DATA_PATH, \"pns\", system, pn) \"\"\"", "+ \"_train.txt\") gen_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\"", "fitness[\"log_fitness\"] generalization = 2 * ((fitness * precision) / (fitness", "+= 1 else: traces.append(row) f_traces = [] for trace in", "case + 1 writeToFile(str(to_path), lines) if __name__ == \"__main__\": parser", "of skipped traces are:\", str(skipped)) return f_traces def writeToFile(file, lst):", "= args.pn strategy = args.strategy if DATA_PATH is None: train_file", "\"pns\", system, pn) else: train_file = os.path.join(DATA_PATH, \"variants\", system +", "argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which system (e.g. 
pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix',", "datetime.fromtimestamp(timestamp) line = str(event) + \"_\" + \",\" + str(case)", "help='naive/mh', required=True) args = parser.parse_args() system = args.system suffix =", "traces.append(row) else: skipped += 1 else: traces.append(row) f_traces = []", "os.path.join(WORK_PATH, \"data\", \"pns\", system, pn) else: train_file = os.path.join(DATA_PATH, \"variants\",", "net, initial_marking, final_marking = pnml_importer.import_net(pn_file) fitness = replay_factory.apply(log, net, initial_marking,", "def convertToCsv(traces, to_path): lines = [] case = 0 timestamp", "str(job) + \"_\" + strategy + \"_generalization.xes\") pn_file = os.path.join(WORK_PATH,", "+ strategy + \"_generalization.xes\") pn_file = os.path.join(DATA_PATH, \"pns\", system, pn)", "timestamp = timestamp + 1 dt_object = datetime.fromtimestamp(timestamp) line =", "\" on PN \", str(pn_file), \" using NAIVE SAMPLING on", "timestamp + 1 dt_object = datetime.fromtimestamp(timestamp) line = str(event) +", "using MH SAMPLING on suffix \", str(suffix),\" ***\") elif strategy", "((fitness * precision) / (fitness + precision)) if strategy ==", "lines.append(line) case = case + 1 writeToFile(str(to_path), lines) if __name__", "on suffix \", str(suffix),\" ***\") elif strategy == \"naive\": print(\"****", "\"data\", \"variants\", system + \"_train.txt\") gen_file = os.path.join(WORK_PATH, \"data\", \"avatar\",", "from pm4py.objects.log.importer.csv import factory as csv_importer from pm4py.objects.log.exporter.xes import factory", "else: skipped += 1 else: traces.append(row) f_traces = [] for", "pn) else: train_file = os.path.join(DATA_PATH, \"variants\", system + \"_train.txt\") gen_file", "[] for trace in traces: f_trace = [] t =", "on PN \", str(pn_file), \" using NAIVE SAMPLING on suffix", "os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix) + \"_j\"", "factory as csv_importer from 
pm4py.objects.log.exporter.xes import factory as xes_exporter from", "READ FILES AND CONVERT TO XES \"\"\" traces = readFile(train_file,gen_file,", "DATA_PATH is None: train_file = os.path.join(WORK_PATH, \"data\", \"variants\", system +", "unique=False): traces = [] skipped = 0 with open(f_name1) as", "= os.path.join(DATA_PATH, \"pns\", system, pn) \"\"\" READ FILES AND CONVERT", "argparse from datetime import datetime from pm4py.objects.log.importer.csv import factory as", "time, argparse from datetime import datetime from pm4py.objects.log.importer.csv import factory", "file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of generated traces", "precision_factory.apply(log, net, initial_marking, final_marking) print(\"Precision=\", precision) fitness = fitness[\"log_fitness\"] generalization", "final_marking) print(\"Precision=\", precision) fitness = fitness[\"log_fitness\"] generalization = 2 *", "pnml as pnml_importer from pm4py.evaluation.replay_fitness import factory as replay_factory from", "t = trace.split(\" \") for i in t: if i", "0: f_traces.append(f_trace) print(\"Number of traces are:\", str(len(f_traces))) print(\"Number of skipped", "__name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which system", "\"_\" + strategy + \".txt\") csv_file = os.path.join(WORK_PATH, \"data\", \"avatar\",", "= file_contents.split(\"\\n\") print(\"Number of generated traces are:\", str(len(file_contents))) for row", "return f_traces def writeToFile(file, lst): with open(file, 'w') as outfile:", "\"_j\" + str(job) + \"_\" + strategy + \".txt\") csv_file", "in traces: f_trace = [] t = trace.split(\" \") for", "precision_factory from conf.settings import DATA_PATH WORK_PATH = os.path.abspath(os.getcwd()) def readFile(f_name1,", "from pm4py.objects.petri.importer import pnml as pnml_importer from pm4py.evaluation.replay_fitness import factory", "strategy + \"_generalization.csv\") 
xes_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system", "+ \"_j\" + str(job) + \"_\" + strategy + \"_generalization.csv\")", "file: file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of generated", "system, pn) \"\"\" READ FILES AND CONVERT TO XES \"\"\"", "are:\", str(len(f_traces))) print(\"Number of skipped traces are:\", str(skipped)) return f_traces", "XES \"\"\" traces = readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log", "timestamp = 0 line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace in", "+ \"_generalization.csv\") xes_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system +", "args.pn strategy = args.strategy if DATA_PATH is None: train_file =", "else: traces.append(row) with open(f_name2) as file: file_contents = file.read() file_contents", "xes_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\" +", "using NAIVE SAMPLING on suffix \", str(suffix), \" ***\") else:", "+ str(job) + \"_\" + strategy + \".txt\") csv_file =", "+ strategy + \"_generalization.xes\") pn_file = os.path.join(WORK_PATH, \"data\", \"pns\", system,", "convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log = csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1) \"\"\"", "are:\", str(len(file_contents))) for row in file_contents: if unique: if row", "import factory as csv_importer from pm4py.objects.log.exporter.xes import factory as xes_exporter", "print(\"Number of skipped traces are:\", str(skipped)) return f_traces def writeToFile(file,", "\"variants\", system + \"_relgan_\" + str(suffix) + \"_j\" + str(job)", "== \"naive\": print(\"**** \", str(system), \" Job \", str(job), \"", "+ \",\" + str(dt_object) lines.append(line) case = case + 1", "\"data\", \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix) + \"_j\"", "TO 
XES \"\"\" traces = readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1)", "f_trace = [] t = trace.split(\" \") for i in", "\",\" + str(dt_object) lines.append(line) case = case + 1 writeToFile(str(to_path),", "net file to evaluate', required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True) args", "for trace in traces: for event in trace: timestamp =", "pnml_importer from pm4py.evaluation.replay_fitness import factory as replay_factory from pm4py.evaluation.precision import", "csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1) \"\"\" PERFORM MEASUREMENT ON PN AND", "/ (fitness + precision)) if strategy == \"mh\": print(\"**** \",", "from pm4py.evaluation.precision import factory as precision_factory from conf.settings import DATA_PATH", "import pnml as pnml_importer from pm4py.evaluation.replay_fitness import factory as replay_factory", "case = 0 timestamp = 0 line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line)", "pm4py.objects.log.importer.csv import factory as csv_importer from pm4py.objects.log.exporter.xes import factory as", "str(pn_file), \" using MH SAMPLING on suffix \", str(suffix),\" ***\")", "and \"<\" not in i: f_trace.append(i) if len(f_trace) > 0:", "= 2 * ((fitness * precision) / (fitness + precision))", "evaluate', required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True) args = parser.parse_args() system", "else: traces.append(row) f_traces = [] for trace in traces: f_trace", "strategy + \".txt\") csv_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system", "lst: outfile.write(str(entry) + \"\\n\") def convertToCsv(traces, to_path): lines = []", "factory as precision_factory from conf.settings import DATA_PATH WORK_PATH = os.path.abspath(os.getcwd())", "help='Which system (e.g. 
pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix', help='Suffix (chosen epoch,", "import factory as replay_factory from pm4py.evaluation.precision import factory as precision_factory", "CONVERT TO XES \"\"\" traces = readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file)", "= [] case = 0 timestamp = 0 line =", "strategy = args.strategy if DATA_PATH is None: train_file = os.path.join(WORK_PATH,", "file_contents.split(\"\\n\") print(\"Number of train traces are:\", str(len(file_contents))) for row in", "str(job) + \"_\" + strategy + \".txt\") csv_file = os.path.join(DATA_PATH,", "if DATA_PATH is None: train_file = os.path.join(WORK_PATH, \"data\", \"variants\", system", "= args.system suffix = int(args.suffix) job = args.job pn =", "+ \".txt\") csv_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system +", "readFile(train_file,gen_file, unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log = csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file)", "+ str(job) + \"_\" + strategy + \"_generalization.xes\") pn_file =", "strategy + \".txt\") csv_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system +", "pn_file = os.path.join(WORK_PATH, \"data\", \"pns\", system, pn) else: train_file =", "on suffix \", str(suffix), \" ***\") else: raise ValueError(\"Unknown strategy.\")", "+ str(case) + \",\" + str(dt_object) lines.append(line) case = case", "if unique: if row not in traces: traces.append(row) else: skipped", "case = case + 1 writeToFile(str(to_path), lines) if __name__ ==", "[] skipped = 0 with open(f_name1) as file: file_contents =", "\" on PN \", str(pn_file), \" using MH SAMPLING on", "system + \"_train.txt\") gen_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system +", "fitness = fitness[\"log_fitness\"] generalization = 2 * ((fitness * precision)", "unique=True) convertToCsv(traces=traces, to_path=csv_file) time.sleep(1) log 
= csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1)", "not in traces: traces.append(row) else: skipped += 1 else: traces.append(row)", "if strategy == \"mh\": print(\"**** \", str(system), \" Job \",", "convertToCsv(traces, to_path): lines = [] case = 0 timestamp =", "= str(event) + \"_\" + \",\" + str(case) + \",\"", "= fitness[\"log_fitness\"] generalization = 2 * ((fitness * precision) /", "SAMPLING on suffix \", str(suffix),\" ***\") elif strategy == \"naive\":", "parser.add_argument('-sfx', '--suffix', help='Suffix (chosen epoch, e.g. 1981)', required=True) parser.add_argument('-j', '--job',", "= file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of train traces are:\",", "+ \"\\n\") def convertToCsv(traces, to_path): lines = [] case =", "from pm4py.objects.log.importer.xes import factory as xes_importer from pm4py.objects.petri.importer import pnml", "required=True) parser.add_argument('-sfx', '--suffix', help='Suffix (chosen epoch, e.g. 1981)', required=True) parser.add_argument('-j',", "print(\"Fitness=\", fitness) precision = precision_factory.apply(log, net, initial_marking, final_marking) print(\"Precision=\", precision)", "str(suffix), \" ***\") else: raise ValueError(\"Unknown strategy.\") print(\"AVATAR Generalization=\", generalization)", "1 writeToFile(str(to_path), lines) if __name__ == \"__main__\": parser = argparse.ArgumentParser()", "gen_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" + str(suffix)", "\"_train.txt\") gen_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\"", "DATA_PATH WORK_PATH = os.path.abspath(os.getcwd()) def readFile(f_name1, f_name2, unique=False): traces =", "traces: traces.append(row) else: skipped += 1 else: traces.append(row) with open(f_name2)", "\"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-s', '--system', help='Which system (e.g. 
pb_system_5_3)',", "pm4py.objects.log.importer.xes import factory as xes_importer from pm4py.objects.petri.importer import pnml as", "csv_importer from pm4py.objects.log.exporter.xes import factory as xes_exporter from pm4py.objects.log.importer.xes import", "initial_marking, final_marking) print(\"Precision=\", precision) fitness = fitness[\"log_fitness\"] generalization = 2", "== \"mh\": print(\"**** \", str(system), \" Job \", str(job), \"", "\"\" and \"<\" not in i: f_trace.append(i) if len(f_trace) >", "traces.append(row) else: skipped += 1 else: traces.append(row) with open(f_name2) as", "print(\"Number of generated traces are:\", str(len(file_contents))) for row in file_contents:", "os, time, argparse from datetime import datetime from pm4py.objects.log.importer.csv import", "\"variants\", system + \"_train.txt\") gen_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system", "fitness) precision = precision_factory.apply(log, net, initial_marking, final_marking) print(\"Precision=\", precision) fitness", "[] case = 0 timestamp = 0 line = \"concept:name,case:concept:name,time:timestamp\"", "pm4py.evaluation.replay_fitness import factory as replay_factory from pm4py.evaluation.precision import factory as", "row not in traces: traces.append(row) else: skipped += 1 else:", "f_traces def writeToFile(file, lst): with open(file, 'w') as outfile: for", "+ \"_generalization.xes\") pn_file = os.path.join(DATA_PATH, \"pns\", system, pn) \"\"\" READ", "= args.job pn = args.pn strategy = args.strategy if DATA_PATH", "\"pns\", system, pn) \"\"\" READ FILES AND CONVERT TO XES", "+ str(dt_object) lines.append(line) case = case + 1 writeToFile(str(to_path), lines)", "= 0 timestamp = 0 line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for", "\"mh\": print(\"**** \", str(system), \" Job \", str(job), \" on", "required=True) args = parser.parse_args() system = args.system suffix = int(args.suffix)", "datetime import datetime from 
pm4py.objects.log.importer.csv import factory as csv_importer from", "traces: traces.append(row) else: skipped += 1 else: traces.append(row) f_traces =", "\"_generalization.csv\") xes_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system + \"_relgan_\" +", "\"_j\" + str(job) + \"_\" + strategy + \"_generalization.csv\") xes_file", "\"_generalization.csv\") xes_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\", system + \"_relgan_\"", "os.path.join(DATA_PATH, \"pns\", system, pn) \"\"\" READ FILES AND CONVERT TO", "str(job), \" on PN \", str(pn_file), \" using MH SAMPLING", "+ strategy + \".txt\") csv_file = os.path.join(WORK_PATH, \"data\", \"avatar\", \"variants\",", "in traces: traces.append(row) else: skipped += 1 else: traces.append(row) f_traces", "f_traces = [] for trace in traces: f_trace = []", "0 line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace in traces: for", "replay_factory.apply(log, net, initial_marking, final_marking) print(\"Fitness=\", fitness) precision = precision_factory.apply(log, net,", "in traces: traces.append(row) else: skipped += 1 else: traces.append(row) with", "as pnml_importer from pm4py.evaluation.replay_fitness import factory as replay_factory from pm4py.evaluation.precision", "if i != \"\" and \"<\" not in i: f_trace.append(i)", "in i: f_trace.append(i) if len(f_trace) > 0: f_traces.append(f_trace) print(\"Number of", "file_contents.split(\"\\n\") print(\"Number of generated traces are:\", str(len(file_contents))) for row in", "file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of generated traces are:\", str(len(file_contents)))", "\"\"\" PERFORM MEASUREMENT ON PN AND XES\"\"\" log = xes_importer.import_log(xes_file)", "0 with open(f_name1) as file: file_contents = file.read() file_contents =", "parser.add_argument('-j', '--job', help='Job (0/1)', required=True) parser.add_argument('-pn', '--pn', help='Petri net file", "suffix = int(args.suffix) job 
= args.job pn = args.pn strategy", "precision) fitness = fitness[\"log_fitness\"] generalization = 2 * ((fitness *", "'--system', help='Which system (e.g. pb_system_5_3)', required=True) parser.add_argument('-sfx', '--suffix', help='Suffix (chosen", "of traces are:\", str(len(f_traces))) print(\"Number of skipped traces are:\", str(skipped))", "from pm4py.objects.log.exporter.xes import factory as xes_exporter from pm4py.objects.log.importer.xes import factory", "as outfile: for entry in lst: outfile.write(str(entry) + \"\\n\") def", "= file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of generated traces are:\",", "writeToFile(str(to_path), lines) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-s',", "1 dt_object = datetime.fromtimestamp(timestamp) line = str(event) + \"_\" +", "train_file = os.path.join(WORK_PATH, \"data\", \"variants\", system + \"_train.txt\") gen_file =", "PN \", str(pn_file), \" using NAIVE SAMPLING on suffix \",", "+ \"_generalization.xes\") pn_file = os.path.join(WORK_PATH, \"data\", \"pns\", system, pn) else:", "for event in trace: timestamp = timestamp + 1 dt_object", "pn) \"\"\" READ FILES AND CONVERT TO XES \"\"\" traces", "skipped += 1 else: traces.append(row) with open(f_name2) as file: file_contents", "str(system), \" Job \", str(job), \" on PN \", str(pn_file),", "+ strategy + \"_generalization.csv\") xes_file = os.path.join(DATA_PATH, \"avatar\", \"variants\", system", "= [] skipped = 0 with open(f_name1) as file: file_contents", "factory as replay_factory from pm4py.evaluation.precision import factory as precision_factory from", "time.sleep(1) log = csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1) \"\"\" PERFORM MEASUREMENT", "to_path=csv_file) time.sleep(1) log = csv_importer.import_event_log(csv_file) xes_exporter.export_log(log, xes_file) time.sleep(1) \"\"\" PERFORM", "train traces are:\", str(len(file_contents))) for row in 
file_contents: if unique:", "0 timestamp = 0 line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace", "system = args.system suffix = int(args.suffix) job = args.job pn", "traces = [] skipped = 0 with open(f_name1) as file:", "file: file_contents = file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of train", "as precision_factory from conf.settings import DATA_PATH WORK_PATH = os.path.abspath(os.getcwd()) def", "= trace.split(\" \") for i in t: if i !=", "def writeToFile(file, lst): with open(file, 'w') as outfile: for entry", "= 0 line = \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace in traces:", "'--pn', help='Petri net file to evaluate', required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh',", "= int(args.suffix) job = args.job pn = args.pn strategy =", "strategy + \"_generalization.xes\") pn_file = os.path.join(WORK_PATH, \"data\", \"pns\", system, pn)", "suffix \", str(suffix),\" ***\") elif strategy == \"naive\": print(\"**** \",", "= \"concept:name,case:concept:name,time:timestamp\" lines.append(line) for trace in traces: for event in", "pm4py.evaluation.precision import factory as precision_factory from conf.settings import DATA_PATH WORK_PATH", "i != \"\" and \"<\" not in i: f_trace.append(i) if", "precision)) if strategy == \"mh\": print(\"**** \", str(system), \" Job", "str(pn_file), \" using NAIVE SAMPLING on suffix \", str(suffix), \"", "str(suffix) + \"_j\" + str(job) + \"_\" + strategy +", "print(\"Number of traces are:\", str(len(f_traces))) print(\"Number of skipped traces are:\",", "replay_factory from pm4py.evaluation.precision import factory as precision_factory from conf.settings import", "traces are:\", str(skipped)) return f_traces def writeToFile(file, lst): with open(file,", "to_path): lines = [] case = 0 timestamp = 0", "args = parser.parse_args() system = args.system suffix = int(args.suffix) job", "import os, time, argparse from 
datetime import datetime from pm4py.objects.log.importer.csv", "job = args.job pn = args.pn strategy = args.strategy if", "lines) if __name__ == \"__main__\": parser = argparse.ArgumentParser() parser.add_argument('-s', '--system',", "\", str(system), \" Job \", str(job), \" on PN \",", "import factory as xes_importer from pm4py.objects.petri.importer import pnml as pnml_importer", "2 * ((fitness * precision) / (fitness + precision)) if", "as xes_importer from pm4py.objects.petri.importer import pnml as pnml_importer from pm4py.evaluation.replay_fitness", "parser.parse_args() system = args.system suffix = int(args.suffix) job = args.job", "f_traces.append(f_trace) print(\"Number of traces are:\", str(len(f_traces))) print(\"Number of skipped traces", "precision = precision_factory.apply(log, net, initial_marking, final_marking) print(\"Precision=\", precision) fitness =", "for i in t: if i != \"\" and \"<\"", "+ precision)) if strategy == \"mh\": print(\"**** \", str(system), \"", "else: train_file = os.path.join(DATA_PATH, \"variants\", system + \"_train.txt\") gen_file =", "else: skipped += 1 else: traces.append(row) with open(f_name2) as file:", "pn_file = os.path.join(DATA_PATH, \"pns\", system, pn) \"\"\" READ FILES AND", "help='Petri net file to evaluate', required=True) parser.add_argument('-strategy', '--strategy', help='naive/mh', required=True)", "args.system suffix = int(args.suffix) job = args.job pn = args.pn", "= [] t = trace.split(\" \") for i in t:", "is None: train_file = os.path.join(WORK_PATH, \"data\", \"variants\", system + \"_train.txt\")", "str(suffix),\" ***\") elif strategy == \"naive\": print(\"**** \", str(system), \"", "with open(f_name2) as file: file_contents = file.read() file_contents = file_contents.split(\"\\n\")", "file.read() file_contents = file_contents.split(\"\\n\") print(\"Number of train traces are:\", str(len(file_contents)))", "'--strategy', help='naive/mh', required=True) args = parser.parse_args() system = args.system 
suffix", "pm4py.objects.petri.importer import pnml as pnml_importer from pm4py.evaluation.replay_fitness import factory as", "os.path.join(WORK_PATH, \"data\", \"variants\", system + \"_train.txt\") gen_file = os.path.join(WORK_PATH, \"data\",", "str(job) + \"_\" + strategy + \".txt\") csv_file = os.path.join(WORK_PATH,", "train_file = os.path.join(DATA_PATH, \"variants\", system + \"_train.txt\") gen_file = os.path.join(DATA_PATH,", "readFile(f_name1, f_name2, unique=False): traces = [] skipped = 0 with", "are:\", str(skipped)) return f_traces def writeToFile(file, lst): with open(file, 'w')", "factory as xes_importer from pm4py.objects.petri.importer import pnml as pnml_importer from", "(chosen epoch, e.g. 1981)', required=True) parser.add_argument('-j', '--job', help='Job (0/1)', required=True)", "+ \"_relgan_\" + str(suffix) + \"_j\" + str(job) + \"_\"" ]
[ "cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac #lib = cdll.LoadLibrary(\"../target/release/libembed.so\")", "# $ python embed.py from ctypes import cdll lib =", "lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac #lib = cdll.LoadLibrary(\"../target/release/libembed.so\") #=>", "= cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac #lib = cdll.LoadLibrary(\"../target/release/libembed.so\") #=> for", "$ python embed.py from ctypes import cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\")", "cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac #lib = cdll.LoadLibrary(\"../target/release/libembed.so\") #=> for Linux", "for Mac #lib = cdll.LoadLibrary(\"../target/release/libembed.so\") #=> for Linux lib.process() print(\"done!\")", "#=> for Mac #lib = cdll.LoadLibrary(\"../target/release/libembed.so\") #=> for Linux lib.process()", "embed.py from ctypes import cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for", "from ctypes import cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac", "ctypes import cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac #lib", "import cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=> for Mac #lib =", "python embed.py from ctypes import cdll lib = cdll.LoadLibrary(\"../target/release/libembed.dylib\") #=>" ]
[ "local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream()", "SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type(", "the HTTP request and returns deserialized data. :param resource_path: Path", "response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes the HTTP request and", "request_type=None): \"\"\"Makes the HTTP request and returns deserialized data. :param", "import six from huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions import", "import exceptions from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest", "body_params = None if 'body' in local_var_params: body_params = local_var_params['body']", "data type. :param response_headers: Header should be added to response", "exceptions from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class", "object for this client :param pool_threads: The number of threads", "attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr)", "The number of threads to use for async requests to", "\"\"\" all_params = ['body'] local_var_params = {} for attr in", "SdkStreamRequest class ImageClient(Client): \"\"\" :param configuration: .Configuration object for this", "request header. :param body: Request body. :param post_params dict: Request", "body: Request body. 
:param post_params dict: Request post form parameters,", "__future__ import absolute_import import datetime import re import importlib import", "huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class ImageClient(Client): \"\"\" :param configuration: .Configuration object", ".Configuration object for this client :param pool_threads: The number of", "data. :param resource_path: Path to method endpoint. :param method: Method", "run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\"", "resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings,", "自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request) def", "'object': object, } def __init__(self): super(ImageClient, self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\")", "post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. 
:param", "auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest", "collection_formats=None, request_type=None): \"\"\"Makes the HTTP request and returns deserialized data.", "\"ImageClient\": raise TypeError(\"client type error, support client type is ImageClient\")", "'str': str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object,", "Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import http_utils", ":param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self,", "dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. 
:param auth_settings", "def new_builder(cls, clazz=None): if clazz is None: return ClientBuilder(cls) if", "request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" all_params", "RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param", "= { 'int': int, 'long': int if six.PY3 else long,", "six.integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long': int if six.PY3", "for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request,", "body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path,", "{} query_params = [] header_params = {} form_params = {}", "all_params = ['body'] local_var_params = {} for attr in request.attribute_map:", "response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。", "type. 
:param response_headers: Header should be added to response data.", "method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats,", "path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__)", "# coding: utf-8 from __future__ import absolute_import import datetime import", "object, } def __init__(self): super(ImageClient, self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers", "getattr(request, attr) collection_formats = {} path_params = {} query_params =", "Response data type. :param response_headers: Header should be added to", "{'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None): if clazz is None:", "'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object, } def", "Path to method endpoint. :param method: Method to call. :param", "= ['body'] local_var_params = {} for attr in request.attribute_map: if", "parameters in the url. :param query_params: Query parameters in the", "post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth", "auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes the HTTP request and returns deserialized", "= [] header_params = {} form_params = {} body_params =", "url. :param query_params: Query parameters in the url. :param header_params:", "HTTP request and returns deserialized data. 
:param resource_path: Path to", "clazz is None: return ClientBuilder(cls) if clazz.__name__ != \"ImageClient\": raise", "query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes", "if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type']", ":param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`.", "collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,", "http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params,", "= {} path_params = {} query_params = [] header_params =", "RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param", "path, query, header, and post parameters. :param request_type: Request data", "header_params = {} form_params = {} body_params = None if", "to response data. 
:param collection_formats: dict of collection formats for", "header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self,", "super(ImageClient, self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod", ":return: RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。", ":param path_params: Path parameters in the url. :param query_params: Query", "requests to the API. More threads means more concurrent API", "the response directly. 
\"\"\" return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params,", "is None: return ClientBuilder(cls) if clazz.__name__ != \"ImageClient\": raise TypeError(\"client", "clazz.__name__ != \"ImageClient\": raise TypeError(\"client type error, support client type", "[] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api(", ":return: RunImageTaggingResponse \"\"\" all_params = ['body'] local_var_params = {} for", "return ClientBuilder(cls) if clazz.__name__ != \"ImageClient\": raise TypeError(\"client type error,", "if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest):", "for this client :param pool_threads: The number of threads to", "resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings,", "if six.PY3 else long, 'float': float, 'str': str, 'bool': bool,", ":param method: Method to call. :param path_params: Path parameters in", "self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return:", "long, 'float': float, 'str': str, 'bool': bool, 'date': datetime.date, 'datetime':", "parameters in the url. :param header_params: Header parameters to be", "Method to call. 
:param path_params: Path parameters in the url.", "local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params = {}", "RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" all_params = ['body'] local_var_params =", "if clazz is None: return ClientBuilder(cls) if clazz.__name__ != \"ImageClient\":", "returns deserialized data. :param resource_path: Path to method endpoint. :param", "RunImageTaggingResponse \"\"\" all_params = ['body'] local_var_params = {} for attr", ":param query_params: Query parameters in the url. :param header_params: Header", "path_params = {} query_params = [] header_params = {} form_params", "\"\"\" return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest", "formats for path, query, header, and post parameters. :param request_type:", "<gh_stars>1-10 # coding: utf-8 from __future__ import absolute_import import datetime", "configuration: .Configuration object for this client :param pool_threads: The number", "Request body. :param post_params dict: Request post form parameters, for", "datetime.datetime, 'object': object, } def __init__(self): super(ImageClient, self).__init__() self.model_package =", "= {} query_params = [] header_params = {} form_params =", "the request header. :param body: Request body. 
:param post_params dict:", "ImageClient\") return ClientBuilder(clazz) def run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest", "call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None,", "post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self, request): \"\"\"图像标签", "request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" all_params", "in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params =", "Settings names for the request. :param response_type: Response data type.", "分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request) def", "header_params: Header parameters to be placed in the request header.", "request. :param response_type: Response data type. :param response_headers: Header should", ":param response_headers: Header should be added to response data. :param", "ClientBuilder(cls) if clazz.__name__ != \"ImageClient\": raise TypeError(\"client type error, support", "= importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None):", "and returns deserialized data. :param resource_path: Path to method endpoint.", "async requests to the API. 
More threads means more concurrent", "\"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" all_params =", "= {} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr]", "form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings", "!= \"ImageClient\": raise TypeError(\"client type error, support client type is", "__init__(self): super(ImageClient, self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}", "float, 'str': str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object':", "the API. More threads means more concurrent API requests. \"\"\"", "RunCelebrityRecognitionResponse \"\"\" all_params = ['body'] local_var_params = {} for attr", "absolute_import import datetime import re import importlib import six from", "to the API. More threads means more concurrent API requests.", "import importlib import six from huaweicloudsdkcore.client import Client, ClientBuilder from", "ImageClient(Client): \"\"\" :param configuration: .Configuration object for this client :param", "requests. \"\"\" PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types", "http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class ImageClient(Client): \"\"\" :param configuration:", "path_params: Path parameters in the url. :param query_params: Query parameters", "to method endpoint. :param method: Method to call. 
:param path_params:", "import Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import", "= {} form_params = {} body_params = None if 'body'", "path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__)", "collection_formats: dict of collection formats for path, query, header, and", "= getattr(request, attr) collection_formats = {} path_params = {} query_params", "post parameters. :param request_type: Request data type. :return: Return the", "this client :param pool_threads: The number of threads to use", "url. :param header_params: Header parameters to be placed in the", "More threads means more concurrent API requests. \"\"\" PRIMITIVE_TYPES =", "ClientBuilder(clazz) def run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return:", "Header should be added to response data. :param collection_formats: dict", "API requests. \"\"\" PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) +", "request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None,", "{} for attr in request.attribute_map: if hasattr(request, attr): local_var_params[attr] =", "def run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse", "deserialized data. :param resource_path: Path to method endpoint. 
:param method:", "self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None): if clazz", "hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params", "{} body_params = None if 'body' in local_var_params: body_params =", "threads to use for async requests to the API. More", "request_type: Request data type. :return: Return the response directly. \"\"\"", ":param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" all_params = ['body'] local_var_params", "header, and post parameters. :param request_type: Request data type. :return:", "run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\"", "in the url. :param query_params: Query parameters in the url.", "} def __init__(self): super(ImageClient, self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers =", "return self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse',", "header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes the", "return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request", "Return the response directly. 
\"\"\" return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params,", "auth_settings = [] return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params, header_params=header_params,", "re import importlib import six from huaweicloudsdkcore.client import Client, ClientBuilder", "post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes the HTTP request", "data type. :return: Return the response directly. \"\"\" return self.do_http_request(", "query_params: Query parameters in the url. :param header_params: Header parameters", "from __future__ import absolute_import import datetime import re import importlib", "import re import importlib import six from huaweicloudsdkcore.client import Client,", "def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, response_type=None,", "and post parameters. :param request_type: Request data type. :return: Return", "local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = []", "'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params", "'int': int, 'long': int if six.PY3 else long, 'float': float,", "be placed in the request header. :param body: Request body.", "huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class ImageClient(Client): \"\"\"", "means more concurrent API requests. 
\"\"\" PRIMITIVE_TYPES = (float, bool,", "body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self, request):", "should be added to response data. :param collection_formats: dict of", ":param request_type: Request data type. :return: Return the response directly.", "parameters. :param request_type: Request data type. :return: Return the response", "query, header, and post parameters. :param request_type: Request data type.", "['body'] local_var_params = {} for attr in request.attribute_map: if hasattr(request,", "from huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from", "bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long':", "bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object, } def __init__(self):", "of threads to use for async requests to the API.", "body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes the HTTP", "'long': int if six.PY3 else long, 'float': float, 'str': str,", "query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def", "RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request):", "\"\"\" PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING", "for async requests to the API. 
More threads means more", ":return: Return the response directly. \"\"\" return self.do_http_request( method=method, resource_path=resource_path,", "endpoint. :param method: Method to call. :param path_params: Path parameters", "{ 'int': int, 'long': int if six.PY3 else long, 'float':", ":return: RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。", "\"\"\" :param configuration: .Configuration object for this client :param pool_threads:", "\"\"\"Makes the HTTP request and returns deserialized data. :param resource_path:", "= (float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = {", "use for async requests to the API. More threads means", "'float': float, 'str': str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime,", "Query parameters in the url. :param header_params: Header parameters to", "request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" return", "for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for", "= {} body_params = None if 'body' in local_var_params: body_params", "query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def", "response_headers: Header should be added to response data. :param collection_formats:", "import absolute_import import datetime import re import importlib import six", "list: Auth Settings names for the request. 
:param response_type: Response", ":param header_params: Header parameters to be placed in the request", "local_var_params = {} for attr in request.attribute_map: if hasattr(request, attr):", "+ six.integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long': int if", ":param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self,", "['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params,", "@classmethod def new_builder(cls, clazz=None): if clazz is None: return ClientBuilder(cls)", "Request data type. :return: Return the response directly. \"\"\" return", "method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params, response_type=response_type, response_headers=response_headers, collection_formats=collection_formats,", ":param configuration: .Configuration object for this client :param pool_threads: The", "return ClientBuilder(clazz) def run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request", "request :return: RunCelebrityRecognitionResponse \"\"\" all_params = ['body'] local_var_params = {}", "{} form_params = {} body_params = None if 'body' in", "number of threads to use for async requests to the", ":param body: Request body. :param post_params dict: Request post form", "str, 'bool': bool, 'date': datetime.date, 'datetime': datetime.datetime, 'object': object, }", "to use for async requests to the API. More threads", "attr) collection_formats = {} path_params = {} query_params = []", "in the request header. :param body: Request body. :param post_params", "collection formats for path, query, header, and post parameters. 
:param", "post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method,", "[] return self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params,", "self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return:", "{} path_params = {} query_params = [] header_params = {}", "header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/celebrity-recognition',", "response data. :param collection_formats: dict of collection formats for path,", "client :param pool_threads: The number of threads to use for", "importlib import six from huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions", ":param response_type: Response data type. :param response_headers: Header should be", "def __init__(self): super(ImageClient, self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent':", "'datetime': datetime.datetime, 'object': object, } def __init__(self): super(ImageClient, self).__init__() self.model_package", "[] return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params,", "Path parameters in the url. 
:param query_params: Query parameters in", "\"\"\" return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params,", "the url. :param query_params: Query parameters in the url. :param", "names for the request. :param response_type: Response data type. :param", "to call. :param path_params: Path parameters in the url. :param", "class ImageClient(Client): \"\"\" :param configuration: .Configuration object for this client", "response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = []", "datetime.date, 'datetime': datetime.datetime, 'object': object, } def __init__(self): super(ImageClient, self).__init__()", "Request post form parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list:", "def run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse", "import SdkStreamRequest class ImageClient(Client): \"\"\" :param configuration: .Configuration object for", "in request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats", "header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self,", "is ImageClient\") return ClientBuilder(clazz) def run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param", "header. :param body: Request body. 
:param post_params dict: Request post", ":param collection_formats: dict of collection formats for path, query, header,", "None if 'body' in local_var_params: body_params = local_var_params['body'] if isinstance(request,", "if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats = {}", "\"\"\" return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest", "(float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { 'int':", "None: return ClientBuilder(cls) if clazz.__name__ != \"ImageClient\": raise TypeError(\"client type", "response_headers=None, auth_settings=None, collection_formats=None, request_type=None): \"\"\"Makes the HTTP request and returns", "= [] return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params,", "method: Method to call. :param path_params: Path parameters in the", "API. More threads means more concurrent API requests. 
\"\"\" PRIMITIVE_TYPES", "from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class ImageClient(Client):", "clazz=None): if clazz is None: return ClientBuilder(cls) if clazz.__name__ !=", "= http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST',", "run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\"", "request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" return", "parameters, for `application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names", "request_type=request.__class__.__name__) def run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return:", "'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None): if clazz is None: return", "collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request", "directly. \"\"\" return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body,", "= None if 'body' in local_var_params: body_params = local_var_params['body'] if", "concurrent API requests. 
\"\"\" PRIMITIVE_TYPES = (float, bool, bytes, six.text_type)", "dict of collection formats for path, query, header, and post", "自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" all_params = ['body']", "client type is ImageClient\") return ClientBuilder(clazz) def run_celebrity_recognition(self, request): \"\"\"名人识别", ":param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" all_params = ['body'] local_var_params", "query_params = [] header_params = {} form_params = {} body_params", "run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\"", "raise TypeError(\"client type error, support client type is ImageClient\") return", "def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse", "six.PY3 else long, 'float': float, 'str': str, 'bool': bool, 'date':", "body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8'])", "self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params, response_type=response_type, response_headers=response_headers,", "response directly. 
\"\"\" return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params,", "self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers,", "response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def run_image_tagging(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param", "of collection formats for path, query, header, and post parameters.", "int, 'long': int if six.PY3 else long, 'float': float, 'str':", "request.attribute_map: if hasattr(request, attr): local_var_params[attr] = getattr(request, attr) collection_formats =", "for path, query, header, and post parameters. :param request_type: Request", "import datetime import re import importlib import six from huaweicloudsdkcore.client", "RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request):", "threads means more concurrent API requests. 
\"\"\" PRIMITIVE_TYPES = (float,", "import http_utils from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class ImageClient(Client): \"\"\" :param", "'date': datetime.date, 'datetime': datetime.datetime, 'object': object, } def __init__(self): super(ImageClient,", "= http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/tagging', method='POST',", "bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { 'int': int,", "response_type: Response data type. :param response_headers: Header should be added", "分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" all_params = ['body']", "= {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None): if clazz is", "TypeError(\"client type error, support client type is ImageClient\") return ClientBuilder(clazz)", "six from huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions", "data. :param collection_formats: dict of collection formats for path, query,", "to be placed in the request header. :param body: Request", "pool_threads: The number of threads to use for async requests", "= local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers =", "request :return: RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别", "attr): local_var_params[attr] = getattr(request, attr) collection_formats = {} path_params =", "six.text_type) + six.integer_types NATIVE_TYPES_MAPPING = { 'int': int, 'long': int", "added to response data. 
:param collection_formats: dict of collection formats", "= [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return", "huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request import", "resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params, response_type=response_type, response_headers=response_headers, collection_formats=collection_formats, request_type=request_type)", ":param auth_settings list: Auth Settings names for the request. :param", "`application/x-www-form-urlencoded`, `multipart/form-data`. :param auth_settings list: Auth Settings names for the", ":param resource_path: Path to method endpoint. :param method: Method to", "form_params = {} body_params = None if 'body' in local_var_params:", "placed in the request header. :param body: Request body. 
:param", "method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None,", "response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None, query_params=None,", "error, support client type is ImageClient\") return ClientBuilder(clazz) def run_celebrity_recognition(self,", "= [] return self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params,", "isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers = [] header_params['Content-Type'] =", "resource_path, method, path_params=None, query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None,", "Header parameters to be placed in the request header. :param", "support client type is ImageClient\") return ClientBuilder(clazz) def run_celebrity_recognition(self, request):", "return self.run_celebrity_recognition_with_http_info(request) def run_celebrity_recognition_with_http_info(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request", "for the request. :param response_type: Response data type. :param response_headers:", "the request. :param response_type: Response data type. :param response_headers: Header", "huaweicloudsdkcore.client import Client, ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils", "body. 
:param post_params dict: Request post form parameters, for `application/x-www-form-urlencoded`,", "self).__init__() self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def", "PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types NATIVE_TYPES_MAPPING =", "type. :return: Return the response directly. \"\"\" return self.do_http_request( method=method,", "utf-8 from __future__ import absolute_import import datetime import re import", ":param pool_threads: The number of threads to use for async", "importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls, clazz=None): if", "http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params,", "body_params = local_var_params['body'] if isinstance(request, SdkStreamRequest): body_params = request.get_file_stream() response_headers", "method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats,", "return self.do_http_request( method=method, resource_path=resource_path, path_params=path_params, query_params=query_params, header_params=header_params, body=body, post_params=post_params, response_type=response_type,", "Auth Settings names for the request. :param response_type: Response data", "auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None,", "resource_path: Path to method endpoint. 
:param method: Method to call.", "parameters to be placed in the request header. :param body:", "return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunCelebrityRecognitionResponse',", "type error, support client type is ImageClient\") return ClientBuilder(clazz) def", "request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings =", "request :return: RunImageTaggingResponse \"\"\" all_params = ['body'] local_var_params = {}", "method endpoint. :param method: Method to call. :param path_params: Path", "ClientBuilder from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import http_utils from", "int if six.PY3 else long, 'float': float, 'str': str, 'bool':", "[] header_params = {} form_params = {} body_params = None", "self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params, header_params=header_params, body=body_params, post_params=form_params, response_type='RunImageTaggingResponse', response_headers=response_headers,", "collection_formats = {} path_params = {} query_params = [] header_params", "self.model_package = importlib.import_module(\"huaweicloudsdkimage.v1.model\") self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'} @classmethod def new_builder(cls,", "['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/celebrity-recognition', method='POST', path_params=path_params, query_params=query_params,", "datetime import re import importlib import six from huaweicloudsdkcore.client import", "if clazz.__name__ != \"ImageClient\": raise TypeError(\"client type error, support client", "RunImageTaggingRequest request :return: 
RunImageTaggingResponse \"\"\" all_params = ['body'] local_var_params =", "\"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" all_params =", "the url. :param header_params: Header parameters to be placed in", "coding: utf-8 from __future__ import absolute_import import datetime import re", "from huaweicloudsdkcore.exceptions import exceptions from huaweicloudsdkcore.utils import http_utils from huaweicloudsdkcore.sdk_stream_request", "auth_settings = [] return self.call_api( resource_path='/v1.0/image/tagging', method='POST', path_params=path_params, query_params=query_params, header_params=header_params,", "response_type='RunImageTaggingResponse', response_headers=response_headers, auth_settings=auth_settings, collection_formats=collection_formats, request_type=request.__class__.__name__) def call_api(self, resource_path, method, path_params=None,", "\"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request)", "from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest class ImageClient(Client): \"\"\" :param configuration: .Configuration", "call. :param path_params: Path parameters in the url. :param query_params:", "new_builder(cls, clazz=None): if clazz is None: return ClientBuilder(cls) if clazz.__name__", "request :return: RunImageTaggingResponse \"\"\" return self.run_image_tagging_with_http_info(request) def run_image_tagging_with_http_info(self, request): \"\"\"图像标签", "path_params=None, query_params=None, header_params=None, body=None, post_params=None, response_type=None, response_headers=None, auth_settings=None, collection_formats=None, request_type=None):", "in the url. 
:param header_params: Header parameters to be placed", "header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings = [] return self.call_api( resource_path='/v1.0/image/tagging',", "auth_settings list: Auth Settings names for the request. :param response_type:", ":return: RunCelebrityRecognitionResponse \"\"\" all_params = ['body'] local_var_params = {} for", "request and returns deserialized data. :param resource_path: Path to method", "`multipart/form-data`. :param auth_settings list: Auth Settings names for the request.", "def run_image_tagging_with_http_info(self, request): \"\"\"图像标签 自然图像的语义内容非常丰富,一个图像包含多个标签内容,图像标签服务准确识别自然图片中数百种场景、上千种通用物体及其属性,让智能相册管理、照片检索和分类、基于场景内容或者物体的广告推荐等功能更加直观。使用时用户发送待处理图片,返回图片标签内容及相应置信度。 :param RunImageTaggingRequest request :return: RunImageTaggingResponse", "= request.get_file_stream() response_headers = [] header_params['Content-Type'] = http_utils.select_header_content_type( ['application/json;charset=UTF-8']) auth_settings", "else long, 'float': float, 'str': str, 'bool': bool, 'date': datetime.date,", "\"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。 :param RunCelebrityRecognitionRequest request :return: RunCelebrityRecognitionResponse \"\"\" return self.run_celebrity_recognition_with_http_info(request)", "be added to response data. :param collection_formats: dict of collection", "type is ImageClient\") return ClientBuilder(clazz) def run_celebrity_recognition(self, request): \"\"\"名人识别 分析并识别图片中包含的政治人物、明星及网红人物,返回人物信息及人脸坐标。", "more concurrent API requests. \"\"\" PRIMITIVE_TYPES = (float, bool, bytes,", "NATIVE_TYPES_MAPPING = { 'int': int, 'long': int if six.PY3 else" ]
[ "= False, decreasing: bool = False, multi_spikes_threshold: float = .7,", "Improvement Objective'] = (self.diagnostics.loc[ self.iter - 1, 'Objective Function'] -", "after iteration Returns ------- \"\"\" if self.iter == 0: self.diagnostics", "= init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec self.decreasing = decreasing self.final_reweighting_prec =", "* self.bound if self.decreasing: self.reweighting_prec = self.init_reweighting_prec / (self.iter +", "must be in ['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor,", "np.dot(self.data, column) / (np.linalg.norm(column, 2) ** 2) overvalue = np.abs(iterand)", "self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind) == 0: self.new_ind = None self.dual_certificate_value", "verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def combine_new_impulse(self) -> Any: iterand =", "2 / (2 * self.lambda_) self.start = None if verbose", "self.new_ind] * self.bound * np.sign( self.dual_certificate_value) - self.forwardOp @ iterand,", "* self.bound return {'iterand': iterand, 'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def", "self.dual_certificate_value) - self.forwardOp @ iterand, 2) ** 2 else: gamma", "np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices = new_positions else: new_positions = self.old_iterand['positions']", "self.reweighting == 'ista': tau = 1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration =", "self.iter, 'Objective Function']) / \\ self.diagnostics.loc[ self.iter - 1, 'Objective", "= (1 / 2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ * restricted_forward", "1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration = None else: raise ValueError('Reweighting strategy", "np.dot(self.data, column) / 
(np.linalg.norm(column, 2) ** 2) self.last_weight = iterand[active_indices]", "gamma = 2/(self.iter + 3) else: new_positions = self.old_iterand['positions'] if", "else: active_indices = new_positions if active_indices.shape[0] > 1: x0 =", "reweighting: str = 'ista', t_max: float = None): self.data =", "l22_loss * self.forwardOp if lambda_ is None: lambda_ = lambda_factor", "iterand[active_indices] overvalue = np.abs(iterand) > self.bound if overvalue.sum() > 0:", "Optional[int] = 10, remove_positions: bool = False, remember_iterand: bool =", "if verbose is not None: self.candidate_new = [] self.actual_new =", "'Relative Improvement Objective', 'Relative Improvement Iterand', 'Dual Certificate Value', 'Objective", "np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] = None, lambda_factor: Optional[float] =", "min_iter=1) return injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp:", "2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ * restricted_forward # restricted_data_fidelity.lipschitz_cst =", "1) gamma /= np.linalg.norm(self.forwardOp @ iterand, 2) ** 2 else:", "self.new_ind is not None: self.last_weight = iterand[self.new_ind] else: tmp =", "np.abs(iterand) > self.bound if overvalue.sum() > 0: print(\"Overvalue at coordinates", "75.): if x0 is None: x0 = np.zeros(active_indices.shape) injection =", "else: gamma = 2/(self.iter + 3) else: new_positions = self.old_iterand['positions']", "= np.where(d > max(threshold, 1.))[0] # print(\"Threshold: {} / {}\".format(threshold,", "restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity = (1 /", "self.reweighting == 'fista': acceleration = 'CD' tau = None elif", "accuracy_threshold: float = 1e-4, verbose: Optional[int] = 10, remember_iterand: bool", "self.old_iterand['positions'] if 
self.iter > 0 and self.remove_positions: active_indices = np.unique(iterand.nonzero()[0])", "= np.dot(self.data, column) / (np.linalg.norm(column, 2) ** 2) self.last_weight =", "* maxi self.epsilon = (1 - self.multi_spikes_threshold) * maxi else:", "float = None): self.remove_positions = remove_positions self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec", "def stopping_metric(self): if self.iter == 0: return np.infty elif self.stopping_strategy", "np.zeros(self.dim) tmp[active_indices] = 1. column = self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data,", "* self.epsilon indices = np.where(d > max(threshold, 1.))[0] # print(\"Threshold:", "np.unique(iterand.nonzero()[0]) else: active_indices = new_positions if active_indices.shape[0] > 1: x0", "= 'fista', t_max: float = None): self.remove_positions = remove_positions self.reweighting_prec", "class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float]", "strategy must be in [\"fista\", \"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity,", "/ 2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ * restricted_forward # restricted_data_fidelity.lipschitz_cst", "'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * ( 1.", "\\ self.diagnostics.loc[ self.iter - 1, 'Objective Function'] if self.remember_iterand: self.iterand_history.append(self.iterand['iterand'])", "*= (1 - gamma) if self.new_ind is not None: iterand[self.new_ind]", "= self.forwardOp.shape[1] self.x0 = np.zeros(self.dim) self.dual_certificate_value = 1 / lambda_factor", "elif self.stopping_strategy == 'relative_improvement': return abs(self.diagnostics.loc[self.iter - 1, 'Relative Improvement", "{}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound if 
self.decreasing: self.reweighting_prec =", "self.multi_spikes_threshold) * maxi else: threshold = maxi - (1 /", "dual_certificate[self.new_ind] if self.new_ind in self.old_iterand['positions']: self.new_ind = None # already", "else: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = (self.diagnostics.loc[ self.iter - 1,", "is not None: self.last_weight = iterand[self.new_ind] else: tmp = np.zeros(self.dim)", "+ (np.abs(self.dual_certificate_value) - 1.) * self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind]", "1) self.reweighting_prec = max(self.reweighting_prec, self.final_reweighting_prec) return {'iterand': iterand, 'positions': new_positions}", "restricted_forward # restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization", "Improvement Iterand', 'Dual Certificate Value', 'Objective Function']) self.diagnostics.loc[self.iter, 'Iter'] =", "int = 500, stopping_strategy: str = 'certificate', accuracy_threshold: float =", "2)) * self.epsilon indices = np.where(d > max(threshold, 1.))[0] #", "acceleration = 'CD' tau = None elif self.reweighting == 'ista':", "verbose=None, accuracy_threshold=accuracy, d=d, max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso): def", "= 1e-4, reweighting: str = 'fista', t_max: float = None):", "iterand) + self.lambda_ * ( 1. 
* np.linalg.norm(iterand, 1) +", "multi_spikes_threshold self.reweighting = reweighting self.remove_positions = remove_positions self.decreasing = decreasing", "= np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind] if self.new_ind in self.old_iterand['positions']: self.new_ind", "self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def update_iterand(self) ->", "def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] = None,", "self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand']) /", "max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray,", "- self.forwardOp @ iterand, 2) ** 2 else: gamma =", "active_indices.shape[0] > 1: iterand[self.new_ind] = np.sign(self.dual_certificate_value) * self.last_weight x0 =", "max_iter: int = 500, stopping_strategy: str = 'certificate', accuracy_threshold: float", "None: new_positions = np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size == 'optimal': gamma", "abs(abs(value) - 1) else: raise ValueError('Stopping strategy must be in", "decreasing: bool = False, multi_spikes_threshold: float = .7, t_max: float", "iterand[self.new_ind] = np.sign(self.dual_certificate_value) * self.last_weight x0 = iterand[active_indices] iterand =", "self.data = data self.forwardOp = forwardOp self.stopping_strategy = stopping_strategy self.accuracy_threshold", "0: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative", "False, decreasing: bool = False, multi_spikes_threshold: float = .7, multi_spikes:", "@ iterand, 2) ** 2 else: gamma = 2/(self.iter +", 
"restricted_data_fidelity = (1 / 2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ *", "elif self.reweighting == 'ista': tau = 1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration", "import SquaredL2Loss from pycsou.opt.proxalgs import APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self,", "= 'ista', t_max: float = None): self.data = data self.forwardOp", "= new_positions if active_indices.shape[0] > 1: x0 = iterand[active_indices] iterand", "None): self.remove_positions = remove_positions self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec", "0: threshold = self.multi_spikes_threshold * maxi self.epsilon = (1 -", "PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] =", "= DataFrame( columns=['Iter', 'Relative Improvement Objective', 'Relative Improvement Iterand', 'Dual", "= [] self.actual_new = [] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter,", "utils import TimedGenericIterativeAlgorithm import pycsou.core as pcore import pycsou.linop as", "lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max)", "multi_spikes_threshold: float = .7, t_max: float = None): self.remove_positions =", "bool = False, final_reweighting_prec: float = 1e-4, init_reweighting_prec: float =", "x0: np.ndarray = None, d: float = 75.): if x0", "stopping_strategy: str = 'certificate', accuracy_threshold: float = 1e-4, verbose: Optional[int]", "= self.data_fidelity + self.penalty self.bound = np.linalg.norm(self.data) ** 2 /", "tau = 1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration 
= None else: raise", "== 0: return np.infty elif self.stopping_strategy == 'relative_improvement': return abs(self.diagnostics.loc[self.iter", "self.new_ind is not None: new_positions = np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size", "assume_unique=True) if self.verbose is not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind)", "= None): self.remove_positions = remove_positions self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data,", "= self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else: tmp = np.zeros(self.dim) tmp[active_indices] =", "self.bound return {'iterand': iterand, 'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self,", "= None if verbose is not None: self.candidate_new = []", "data=self.data) \\ * restricted_forward # restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst", "import pycsou.linop as pl from pycsou.func.penalty import L1Norm from pycsou.func.loss", "(1 - self.multi_spikes_threshold) * maxi else: threshold = maxi -", "stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting = reweighting self.last_weight =", "self.lambda_ * ( 1. 
* np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value) -", "from abc import abstractmethod from utils import TimedGenericIterativeAlgorithm import pycsou.core", "3) if not 0 < gamma < 1: gamma =", "= np.abs(dual_certificate) if self.multi_spikes: maxi = np.max(d) if self.iter ==", "else: new_positions = self.old_iterand['positions'] if self.step_size == 'optimal': gamma =", "'ista', t_max: float = None): self.data = data self.forwardOp =", "tau=tau, acceleration=acceleration, verbose=None, accuracy_threshold=accuracy, d=d, max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand']) class", "None: self.candidate_new = [] self.actual_new = [] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand,", "__init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] = None, lambda_factor:", "as np from typing import Optional, Any from pandas import", "Improvement Objective'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] =", "size strategy must be in ['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp,", "self.x0, 'positions': np.array([], dtype=int)} l22_loss = (1 / 2) *", "self.lambda_ * L1Norm(dim=self.dim) objective_functional = self.data_fidelity + self.penalty self.bound =", "/ (np.linalg.norm(column, 2) ** 2) overvalue = np.abs(iterand) > self.bound", "= init_reweighting_prec self.decreasing = decreasing self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data,", "np.unique(iterand.nonzero()[0]) else: active_indices = new_positions if active_indices.shape[0] > 1: iterand[self.new_ind]", "super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, 
verbose=verbose,", "gamma < 1: gamma = np.clip(gamma, 0., 1.) iterand *=", "DataFrame( columns=['Iter', 'Relative Improvement Objective', 'Relative Improvement Iterand', 'Dual Certificate", "been triggered in practice print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] =", "gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound * np.sign( self.dual_certificate_value) -", "return abs(self.diagnostics.loc[self.iter - 1, 'Relative Improvement Objective']) elif self.stopping_strategy ==", "import abstractmethod from utils import TimedGenericIterativeAlgorithm import pycsou.core as pcore", "self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ *", "= False, remember_iterand: bool = False, decreasing: bool = False,", "= {'iterand': self.x0, 'positions': np.array([], dtype=int)} l22_loss = (1 /", "- self.diagnostics.loc[ self.iter, 'Objective Function']) / \\ self.diagnostics.loc[ self.iter -", "* ( 1. 
* np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value) - 1.)", "pcore.linop.LinearOperator, lambda_: Optional[float] = None, lambda_factor: Optional[float] = 0.1, min_iter:", "coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound return {'iterand': iterand,", "if overvalue.sum() > 0: #Sanity check, never been triggered in", "remember_iterand: bool = False, remove_positions: bool = False, reweighting_prec: float", "self.iter > 0 and self.remove_positions: active_indices = np.unique(iterand.nonzero()[0]) else: active_indices", "= None, lambda_factor: Optional[float] = 0.1, min_iter: int = 10,", "= np.linalg.norm(self.data) ** 2 / (2 * self.lambda_) self.start =", "np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * np.linalg.norm(iterand, 1) gamma /= np.linalg.norm(self.forwardOp", "practice print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound", "numpy as np from typing import Optional, Any from pandas", "if active_indices.shape[0] > 1: iterand[self.new_ind] = np.sign(self.dual_certificate_value) * self.last_weight x0", "tmp = np.zeros(self.dim) tmp[active_indices] = 1. 
column = self.forwardOp(tmp) iterand[active_indices]", "1.: if self.verbose is not None: print('Warning, dual certificate lower", "and self.remove_positions: active_indices = np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices = new_positions", "self.step_size = step_size else: raise ValueError(\"Step size strategy must be", "import pycsou.core as pcore import pycsou.linop as pl from pycsou.func.penalty", "= accuracy_threshold self.multi_spikes = multi_spikes self.multi_spikes_threshold = multi_spikes_threshold self.reweighting =", "present position if abs(self.dual_certificate_value) < 1.: if self.verbose is not", "= False, final_reweighting_prec: float = 1e-4, init_reweighting_prec: float = .2,", "remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', t_max=t_max) def combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand'])", "(np.linalg.norm(column, 2) ** 2) overvalue = np.abs(iterand) > self.bound if", "np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = (self.diagnostics.loc[ self.iter -", "new_positions = self.old_iterand['positions'] if self.iter > 0 and self.remove_positions: active_indices", "= self.bound def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand']) if", "np.linalg.norm(iterand, 1) gamma /= np.linalg.norm(self.forwardOp @ iterand, 2) ** 2", "- 1) else: raise ValueError('Stopping strategy must be in [\"relative_improvement\",", "maxi else: threshold = maxi - (1 / (self.iter +", "(1 / 2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ * restricted_forward #", "iterand[self.new_ind] else: tmp = np.zeros(self.dim) tmp[active_indices] = 1. 
column =", "import deepcopy from abc import abstractmethod from utils import TimedGenericIterativeAlgorithm", "self.reweighting = reweighting self.remove_positions = remove_positions self.decreasing = decreasing self.dim", "if self.new_ind in self.old_iterand['positions']: self.new_ind = None # already present", "= iterand[active_indices] overvalue = np.abs(iterand) > self.bound if overvalue.sum() >", "3) else: new_positions = self.old_iterand['positions'] if self.step_size == 'optimal': gamma", "iterand[active_indices] = np.dot(self.data, column) / (np.linalg.norm(column, 2) ** 2) self.last_weight", "{'iterand': self.x0, 'positions': np.array([], dtype=int)} l22_loss = (1 / 2)", "if self.iter == 0: self.diagnostics = DataFrame( columns=['Iter', 'Relative Improvement", "t_max=t_max) def combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not", "from pycsou.opt.proxalgs import APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data: np.ndarray,", "/ \\ self.diagnostics.loc[ self.iter - 1, 'Objective Function'] if self.remember_iterand:", "float = None): if step_size in ['optimal', 'regular']: self.step_size =", "reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold,", "= False, reweighting_prec: float = 1e-4, reweighting: str = 'fista',", "remove_positions: bool = False, remember_iterand: bool = False, final_reweighting_prec: float", "init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec self.decreasing = decreasing self.final_reweighting_prec = final_reweighting_prec", "in ['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter,", "Optional[float] = None, 
lambda_factor: Optional[float] = 0.1, min_iter: int =", "= False, multi_spikes_threshold: float = .7, multi_spikes: bool = True,", "< gamma < 1: gamma = np.clip(gamma, 0., 1.) iterand", "if overvalue.sum() > 0: print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] =", "iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else: tmp = np.zeros(self.dim) tmp[active_indices]", "self.decreasing: self.reweighting_prec = self.init_reweighting_prec / (self.iter + 1) self.reweighting_prec =", "= self.iter if np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand']", "np from typing import Optional, Any from pandas import DataFrame", "= self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_ *", "= np.clip(gamma, 0., 1.) iterand *= (1 - gamma) if", "None: iterand[self.new_ind] += gamma * np.sign(self.dual_certificate_value) * self.bound return {'iterand':", "Value', 'Objective Function']) self.diagnostics.loc[self.iter, 'Iter'] = self.iter if np.linalg.norm(self.old_iterand['iterand']) ==", "not 0 < gamma < 1: gamma = np.clip(gamma, 0.,", "return {'iterand': iterand, 'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data:", "verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', t_max=t_max) def combine_new_impulse(self): iterand =", "min_iter: int = 10, max_iter: int = 500, stopping_strategy: str", "indices = np.where(d > max(threshold, 1.))[0] # print(\"Threshold: {} /", "forwardOp self.stopping_strategy = stopping_strategy self.accuracy_threshold = accuracy_threshold self.multi_spikes = multi_spikes", "None, lambda_factor: Optional[float] = 0.1, min_iter: int = 10, max_iter:", "- 
self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate Value'] =", "np.zeros(self.dim) self.dual_certificate_value = 1 / lambda_factor self.new_ind = None self.epsilon", "= np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = (self.diagnostics.loc[ self.iter", "'Relative Improvement Objective'] = (self.diagnostics.loc[ self.iter - 1, 'Objective Function']", "self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate Value'] = self.dual_certificate_value # before iteration", "= np.sign(self.dual_certificate_value) * self.last_weight x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices,", "maxi - (1 / (self.iter + 2)) * self.epsilon indices", "float = 75.): if x0 is None: x0 = np.zeros(active_indices.shape)", "lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting", "np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.infty else:", "= deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions = np.hstack([self.old_iterand['positions'],", "multi_spikes: bool = True, reweighting: str = 'ista', t_max: float", "True, reweighting: str = 'ista', t_max: float = None): self.data", "self.forwardOp if lambda_ is None: lambda_ = lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max()", "raise ValueError('Stopping strategy must be in [\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self,", "= self.init_reweighting_prec / (self.iter + 1) self.reweighting_prec = max(self.reweighting_prec, self.final_reweighting_prec)", "x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, 
x0=x0) if self.new_ind", "SquaredL2Loss from pycsou.opt.proxalgs import APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data:", "Improvement Iterand'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] =", "= False, multi_spikes_threshold: float = .7, t_max: float = None):", "[] self.actual_new = [] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold,", "= self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data, column) / (np.linalg.norm(column, 2) **", "= 1e-4, verbose: Optional[int] = 10, remove_positions: bool = False,", "restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting", "check, never been triggered in practice print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue]))", "1, 'Objective Function'] if self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def", "None elif self.reweighting == 'ista': tau = 1.9 / restricted_data_fidelity.diff_lipschitz_cst", "2) ** 2 else: gamma = 2/(self.iter + 3) else:", "iterand, 'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp:", "0: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative", "solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration, verbose=None, accuracy_threshold=accuracy,", "F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration, 
verbose=None, accuracy_threshold=accuracy, d=d, max_iter=2000, min_iter=1)", "Optional[int] = 10, remember_iterand: bool = False, step_size: str =", "{'iterand': iterand, 'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data: np.ndarray,", "if self.new_ind is not None: self.last_weight = iterand[self.new_ind] else: tmp", "from pandas import DataFrame from copy import deepcopy from abc", "is computed after iteration Returns ------- \"\"\" if self.iter ==", "= np.abs(iterand) > self.bound if overvalue.sum() > 0: #Sanity check,", "is not None: self.candidate_new = [] self.actual_new = [] super(GenericFWSolverForLasso,", "else: raise ValueError('Reweighting strategy must be in [\"fista\", \"ista\"]') solver", "if not 0 < gamma < 1: gamma = np.clip(gamma,", "lambda_ self.penalty = self.lambda_ * L1Norm(dim=self.dim) objective_functional = self.data_fidelity +", "= new_positions else: new_positions = self.old_iterand['positions'] if self.iter > 0", "self.last_weight = iterand[self.new_ind] else: tmp = np.zeros(self.dim) tmp[active_indices] = 1.", "** 2 / (2 * self.lambda_) self.start = None if", "= reweighting self.remove_positions = remove_positions self.decreasing = decreasing self.dim =", "\\ * restricted_forward # restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst =", "import L1Norm from pycsou.func.loss import SquaredL2Loss from pycsou.opt.proxalgs import APGD", "raise ValueError(\"Step size strategy must be in ['optimal', 'regular']\") super(VanillaFWSolverForLasso,", "= None else: raise ValueError('Reweighting strategy must be in [\"fista\",", "self.iter == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = np.infty else:", "self.dual_certificate_value # before iteration self.diagnostics.loc[self.iter, 'Objective Function'] = self.objective_functional(self.iterand['iterand']) if", "= 
np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * np.linalg.norm(iterand, 1) gamma /=", "= l22_loss * self.forwardOp if lambda_ is None: lambda_ =", "False, step_size: str = 'optimal', t_max: float = None): if", "= self.diagnostics.loc[self.iter - 1, 'Dual Certificate Value'] return abs(abs(value) -", "self.init_reweighting_prec / (self.iter + 1) self.reweighting_prec = max(self.reweighting_prec, self.final_reweighting_prec) return", "active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity = (1 / 2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data)", "self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate Value'] = self.dual_certificate_value", "1 / lambda_factor self.new_ind = None self.epsilon = None self.remember_iterand", "self.start = None if verbose is not None: self.candidate_new =", "injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_:", "verbose=verbose, t_max=t_max) def update_iterand(self) -> Any: self.compute_new_impulse() res = self.combine_new_impulse()", "10, remember_iterand: bool = False, step_size: str = 'optimal', t_max:", "0: self.new_ind = None self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(), key=abs) else:", "step_size else: raise ValueError(\"Step size strategy must be in ['optimal',", "str = 'fista', t_max: float = None): self.remove_positions = remove_positions", "= 1e-4, init_reweighting_prec: float = .2, decreasing: bool = False,", "abstractmethod from utils import TimedGenericIterativeAlgorithm import pycsou.core as pcore import", "print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound if", "= pl.DenseLinearOperator( self.forwardOp.mat[:, 
active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity = (1 / 2)", "= remove_positions self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor,", "restricted_data_fidelity.diff_lipschitz_cst acceleration = None else: raise ValueError('Reweighting strategy must be", "'regular']: self.step_size = step_size else: raise ValueError(\"Step size strategy must", "self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter,", "'CD' tau = None elif self.reweighting == 'ista': tau =", "'Relative Improvement Iterand'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand']", "Objective'] = (self.diagnostics.loc[ self.iter - 1, 'Objective Function'] - self.diagnostics.loc[", "* self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound * np.sign(", "> 0: #Sanity check, never been triggered in practice print(\"Overvalue", "gamma = np.clip(gamma, 0., 1.) iterand *= (1 - gamma)", "tmp[active_indices] = 1. column = self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data, column)", "None): self.remove_positions = remove_positions self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp,", "else: gamma = 2/(self.iter + 3) if not 0 <", "1.) 
* self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound *", "maxi = np.max(d) if self.iter == 0: threshold = self.multi_spikes_threshold", "max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting = reweighting self.last_weight", "0 and self.remove_positions: active_indices = np.unique(iterand.nonzero()[0]) else: active_indices = new_positions", "overvalue = np.abs(iterand) > self.bound if overvalue.sum() > 0: #Sanity", "= self.dual_certificate_value # before iteration self.diagnostics.loc[self.iter, 'Objective Function'] = self.objective_functional(self.iterand['iterand'])", "'Relative Improvement Objective'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Objective']", "= final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy,", "self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting == 'fista': acceleration = 'CD'", "if abs(self.dual_certificate_value) < 1.: if self.verbose is not None: print('Warning,", "print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if self.iter == 0: return np.infty", "+ 3) if not 0 < gamma < 1: gamma", "as pcore import pycsou.linop as pl from pycsou.func.penalty import L1Norm", "np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate", "1e-4, reweighting: str = 'fista', t_max: float = None): self.remove_positions", "stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', 
t_max=t_max) def combine_new_impulse(self):", "as pl from pycsou.func.penalty import L1Norm from pycsou.func.loss import SquaredL2Loss", "t_max: float = None): self.remove_positions = remove_positions self.reweighting_prec = init_reweighting_prec", "if self.multi_spikes: maxi = np.max(d) if self.iter == 0: threshold", "self.remove_positions = remove_positions self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec self.decreasing", "bool = False, multi_spikes_threshold: float = .7, t_max: float =", "remove_positions: bool = False, remember_iterand: bool = False, decreasing: bool", "accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def combine_new_impulse(self) -> Any: iterand", "str = 'certificate', accuracy_threshold: float = 1e-4, verbose: Optional[int] =", "APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_:", "= iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) if self.new_ind is", "* self.lambda_) self.start = None if verbose is not None:", "1, 'Dual Certificate Value'] return abs(abs(value) - 1) else: raise", "self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1])", "self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = (self.diagnostics.loc[ self.iter - 1, 'Objective", "self.reweighting_prec = self.init_reweighting_prec / (self.iter + 1) self.reweighting_prec = max(self.reweighting_prec,", "float = 1e-4, reweighting: str = 'fista', t_max: float =", "dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) 
restricted_data_fidelity = (1", "self.stopping_strategy == 'relative_improvement': return abs(self.diagnostics.loc[self.iter - 1, 'Relative Improvement Objective'])", "'Iter'] = self.iter if np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter, 'Relative Improvement", "d=d, max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data:", "is not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind) == 0: self.new_ind", "max(threshold, 1.))[0] # print(\"Threshold: {} / {}\".format(threshold, maxi)) # print('Candidate", "-> Any: pass def update_diagnostics(self): \"\"\" Dual ceritificate value is", "[] init_iterand = {'iterand': self.x0, 'positions': np.array([], dtype=int)} l22_loss =", "Value'] = self.dual_certificate_value # before iteration self.diagnostics.loc[self.iter, 'Objective Function'] =", "= APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration, verbose=None, accuracy_threshold=accuracy, d=d,", "self.accuracy_threshold = accuracy_threshold self.multi_spikes = multi_spikes self.multi_spikes_threshold = multi_spikes_threshold self.reweighting", "Function'] if self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if", "be in [\"fista\", \"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0,", "= self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) if self.new_ind is not None: self.last_weight", "gamma = 2/(self.iter + 3) if not 0 < gamma", "np.clip(gamma, 0., 1.) 
iterand *= (1 - gamma) if self.new_ind", "= remove_positions self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec self.decreasing =", "import Optional, Any from pandas import DataFrame from copy import", "= new_positions if active_indices.shape[0] > 1: iterand[self.new_ind] = np.sign(self.dual_certificate_value) *", "new_positions if active_indices.shape[0] > 1: x0 = iterand[active_indices] iterand =", "self.diagnostics.loc[self.iter, 'Iter'] = self.iter if np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter, 'Relative", "= remember_iterand self.iterand_history = [] init_iterand = {'iterand': self.x0, 'positions':", "** 2 else: gamma = 2/(self.iter + 3) if not", "decreasing self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter,", "/ (2 * self.lambda_) self.start = None if verbose is", "self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement", "self.penalty self.bound = np.linalg.norm(self.data) ** 2 / (2 * self.lambda_)", "self.iter - 1, 'Objective Function'] if self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self):", "self.multi_spikes_threshold = multi_spikes_threshold self.reweighting = reweighting self.remove_positions = remove_positions self.decreasing", "not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind) == 0: self.new_ind =", "Certificate Value', 'Objective Function']) self.diagnostics.loc[self.iter, 'Iter'] = self.iter if np.linalg.norm(self.old_iterand['iterand'])", "new_positions = self.old_iterand['positions'] if self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand),", "t_max=t_max) self.reweighting = reweighting self.last_weight = 
self.bound def combine_new_impulse(self) ->", "= np.sign(iterand[overvalue]) * self.bound return {'iterand': iterand, 'positions': new_positions} class", "self.bound if overvalue.sum() > 0: print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue]", "Improvement Objective', 'Relative Improvement Iterand', 'Dual Certificate Value', 'Objective Function'])", "new_positions = np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size == 'optimal': gamma =", "else: raise ValueError('Stopping strategy must be in [\"relative_improvement\", \"certificate\"]') def", "remember_iterand: bool = False, final_reweighting_prec: float = 1e-4, init_reweighting_prec: float", "self.verbose is not None: print('Warning, dual certificate lower than 1", "else: threshold = maxi - (1 / (self.iter + 2))", "self.new_ind = np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind] if self.new_ind in self.old_iterand['positions']:", "must be in [\"fista\", \"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization,", "= 1e-4, verbose: Optional[int] = 10, remember_iterand: bool = False,", "reweighting self.last_weight = self.bound def combine_new_impulse(self) -> Any: iterand =", "< 1: gamma = np.clip(gamma, 0., 1.) 
iterand *= (1", "np.ndarray = None, d: float = 75.): if x0 is", "iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound return {'iterand': iterand, 'positions': new_positions}", "float = 1e-4, verbose: Optional[int] = 10, remember_iterand: bool =", "= decreasing self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor,", "== 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * np.linalg.norm(iterand,", "x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else: tmp", "['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy,", "else: new_positions = self.old_iterand['positions'] if self.iter > 0 and self.remove_positions:", "lambda_: Optional[float] = None, lambda_factor: Optional[float] = 0.1, min_iter: int", "/ (self.iter + 2)) * self.epsilon indices = np.where(d >", "max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def update_iterand(self) -> Any: self.compute_new_impulse()", "forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] = None, lambda_factor: Optional[float] = 0.1,", "self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement", "strategy must be in ['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_,", "deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions = np.unique(np.hstack([self.old_iterand['positions'], self.new_ind]))", "= np.sign(iterand[overvalue]) * self.bound if self.decreasing: self.reweighting_prec = self.init_reweighting_prec /", "{}'.format(self.iter)) 
@abstractmethod def combine_new_impulse(self) -> Any: pass def update_diagnostics(self): \"\"\"", "(1 - gamma) if self.new_ind is not None: iterand[self.new_ind] +=", "np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound * np.sign( self.dual_certificate_value) - self.forwardOp @", "= 75.): if x0 is None: x0 = np.zeros(active_indices.shape) injection", "self.last_weight x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) if", "1e-4, verbose: Optional[int] = 10, remove_positions: bool = False, remember_iterand:", "False, multi_spikes_threshold: float = .7, t_max: float = None): self.remove_positions", "= deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions = np.unique(np.hstack([self.old_iterand['positions'],", "np.max(d) if self.iter == 0: threshold = self.multi_spikes_threshold * maxi", "remember_iterand=remember_iterand, t_max=t_max) self.reweighting = reweighting self.last_weight = self.bound def combine_new_impulse(self)", "if self.iter == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = np.infty", "'Objective Function']) / \\ self.diagnostics.loc[ self.iter - 1, 'Objective Function']", "forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False,", "* SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ * restricted_forward # restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst", "gamma) if self.new_ind is not None: iterand[self.new_ind] += gamma *", "remember_iterand self.iterand_history = [] init_iterand = {'iterand': self.x0, 'positions': np.array([],", "self.iter == 0: return np.infty elif self.stopping_strategy == 'relative_improvement': return", "from pycsou.func.loss import SquaredL2Loss from pycsou.opt.proxalgs import 
APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm):", "self.bound * np.sign( self.dual_certificate_value) - self.forwardOp @ iterand, 2) **", "GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] =", "- self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d = np.abs(dual_certificate) if self.multi_spikes: maxi", "iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else: tmp = np.zeros(self.dim)", "np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_ self.penalty = self.lambda_ * L1Norm(dim=self.dim) objective_functional", "if lambda_ is None: lambda_ = lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_", "'ista': tau = 1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration = None else:", "restricted_regularization = self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting == 'fista': acceleration", "raise ValueError('Reweighting strategy must be in [\"fista\", \"ista\"]') solver =", "'Objective Function'] if self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self):", "np.linalg.norm(self.forwardOp @ iterand, 2) ** 2 else: gamma = 2/(self.iter", "iterand *= (1 - gamma) if self.new_ind is not None:", "np.abs(iterand) > self.bound if overvalue.sum() > 0: #Sanity check, never", "> 0 and self.remove_positions: active_indices = np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices", "10, max_iter: int = 500, stopping_strategy: str = 'certificate', accuracy_threshold:", "forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, 
remember_iterand=remember_iterand, multi_spikes=True,", "None): if step_size in ['optimal', 'regular']: self.step_size = step_size else:", "/ (np.linalg.norm(column, 2) ** 2) self.last_weight = iterand[active_indices] overvalue =", "#Sanity check, never been triggered in practice print(\"Overvalue at coordinates", "len(self.new_ind) == 0: self.new_ind = None self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(),", "= None self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(), key=abs) else: self.new_ind =", "self.dual_certificate_value = 1 / lambda_factor self.new_ind = None self.epsilon =", "bool = False, step_size: str = 'optimal', t_max: float =", "= np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * ( 1. * np.linalg.norm(iterand,", "{} / {}\".format(threshold, maxi)) # print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind =", "None: self.last_weight = iterand[self.new_ind] else: tmp = np.zeros(self.dim) tmp[active_indices] =", "not None: new_positions = np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if self.iter > 0", "* np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value) - 1.) 
* self.bound) gamma", "= reweighting self.last_weight = self.bound def combine_new_impulse(self) -> Any: iterand", "False, final_reweighting_prec: float = 1e-4, init_reweighting_prec: float = .2, decreasing:", "is not None: new_positions = np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if self.iter >", "L1Norm from pycsou.func.loss import SquaredL2Loss from pycsou.opt.proxalgs import APGD class", "2 else: gamma = 2/(self.iter + 3) else: new_positions =", "new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_:", "value is computed after iteration Returns ------- \"\"\" if self.iter", "lower than 1 at iteration {}'.format(self.iter)) @abstractmethod def combine_new_impulse(self) ->", "ValueError(\"Step size strategy must be in ['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data,", "** 2 else: gamma = 2/(self.iter + 3) else: new_positions", "self.lambda_ d = np.abs(dual_certificate) if self.multi_spikes: maxi = np.max(d) if", "injection = pl.sampling.SubSampling(self.dim, active_indices, dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices])", "self.penalty = self.lambda_ * L1Norm(dim=self.dim) objective_functional = self.data_fidelity + self.penalty", "not None: new_positions = np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size == 'optimal':", "if self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_", "bool = False, multi_spikes_threshold: float = .7, multi_spikes: bool =", "pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity = (1 / 2) *", "return abs(abs(value) - 1) else: raise ValueError('Stopping strategy must be", "= np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size == 
'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand),", "np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand)", "* self.bound * np.sign( self.dual_certificate_value) - self.forwardOp @ iterand, 2)", "min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def update_iterand(self) -> Any: self.compute_new_impulse() res", "self.reweighting = reweighting self.last_weight = self.bound def combine_new_impulse(self) -> Any:", "l22_loss = (1 / 2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity =", "= False, remember_iterand: bool = False, final_reweighting_prec: float = 1e-4,", "= True, reweighting: str = 'ista', t_max: float = None):", "(2 * self.lambda_) self.start = None if verbose is not", "def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is", "+ self.lambda_ * ( 1. * np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value)", "pycsou.func.loss import SquaredL2Loss from pycsou.opt.proxalgs import APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def", "from typing import Optional, Any from pandas import DataFrame from", "self.iter if np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] =", "0: #Sanity check, never been triggered in practice print(\"Overvalue at", "Objective'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = (self.diagnostics.loc[", "None: lambda_ = lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_ self.penalty", "accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting = reweighting self.last_weight = self.bound", "> max(threshold, 1.))[0] # print(\"Threshold: {} / {}\".format(threshold, maxi)) #", 
"self.remember_iterand = remember_iterand self.iterand_history = [] init_iterand = {'iterand': self.x0,", "certificate lower than 1 at iteration {}'.format(self.iter)) @abstractmethod def combine_new_impulse(self)", "= 2/(self.iter + 3) else: new_positions = self.old_iterand['positions'] if self.step_size", "1.) iterand *= (1 - gamma) if self.new_ind is not", "10, remember_iterand: bool = False, remove_positions: bool = False, reweighting_prec:", "dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d = np.abs(dual_certificate) if", "if self.verbose is not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind) ==", "self.old_iterand['positions']: self.new_ind = None # already present position if abs(self.dual_certificate_value)", "if self.iter == 0: return np.infty elif self.stopping_strategy == 'relative_improvement':", "if self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if self.iter", "max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def combine_new_impulse(self) ->", "self.remove_positions: active_indices = np.unique(iterand.nonzero()[0]) else: active_indices = new_positions if active_indices.shape[0]", "= np.unique(iterand.nonzero()[0]) else: active_indices = new_positions if active_indices.shape[0] > 1:", "active_indices = new_positions if active_indices.shape[0] > 1: iterand[self.new_ind] = np.sign(self.dual_certificate_value)", "1: iterand[self.new_ind] = np.sign(self.dual_certificate_value) * self.last_weight x0 = iterand[active_indices] iterand", "float = .2, decreasing: bool = False, multi_spikes_threshold: float =", "Objective']) elif self.stopping_strategy == 'certificate': value = 
self.diagnostics.loc[self.iter - 1,", "= stopping_strategy self.accuracy_threshold = accuracy_threshold self.multi_spikes = multi_spikes self.multi_spikes_threshold =", "= maxi - (1 / (self.iter + 2)) * self.epsilon", "t_max: float = None): self.data = data self.forwardOp = forwardOp", "self.bound def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind", "np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if self.iter > 0 and self.remove_positions: active_indices =", "0: self.diagnostics = DataFrame( columns=['Iter', 'Relative Improvement Objective', 'Relative Improvement", "= 0.1, min_iter: int = 10, max_iter: int = 500,", "multi_spikes_threshold: float = .7, multi_spikes: bool = True, reweighting: str", "= np.zeros(self.dim) self.dual_certificate_value = 1 / lambda_factor self.new_ind = None", "1.))[0] # print(\"Threshold: {} / {}\".format(threshold, maxi)) # print('Candidate indices:", "self.new_ind = None # already present position if abs(self.dual_certificate_value) <", "0.1, min_iter: int = 10, max_iter: int = 500, stopping_strategy:", "bool = False, remember_iterand: bool = False, decreasing: bool =", "self.epsilon indices = np.where(d > max(threshold, 1.))[0] # print(\"Threshold: {}", "print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if self.iter == 0: return np.infty elif", "at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound if self.decreasing:", "compute_new_impulse(self): dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d = np.abs(dual_certificate)", "1: gamma = np.clip(gamma, 0., 1.) 
iterand *= (1 -", "= np.max(d) if self.iter == 0: threshold = self.multi_spikes_threshold *", "\"certificate\"]') def restricted_support_lasso(self, active_indices: np.ndarray, accuracy: float, x0: np.ndarray =", "1, 'Relative Improvement Objective']) elif self.stopping_strategy == 'certificate': value =", "+ 2)) * self.epsilon indices = np.where(d > max(threshold, 1.))[0]", "verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting = reweighting self.last_weight = self.bound def", "'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator,", "= 10, max_iter: int = 500, stopping_strategy: str = 'certificate',", "'optimal', t_max: float = None): if step_size in ['optimal', 'regular']:", "* self.forwardOp if lambda_ is None: lambda_ = lambda_factor *", "False, remember_iterand: bool = False, final_reweighting_prec: float = 1e-4, init_reweighting_prec:", "deepcopy from abc import abstractmethod from utils import TimedGenericIterativeAlgorithm import", "if self.verbose is not None: print('Warning, dual certificate lower than", "remove_positions: bool = False, reweighting_prec: float = 1e-4, reweighting: str", "if active_indices.shape[0] > 1: x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices,", "else: self.new_ind = np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind] if self.new_ind in", "pcore import pycsou.linop as pl from pycsou.func.penalty import L1Norm from", "Objective', 'Relative Improvement Iterand', 'Dual Certificate Value', 'Objective Function']) self.diagnostics.loc[self.iter,", "elif self.stopping_strategy == 'certificate': value = self.diagnostics.loc[self.iter - 1, 'Dual", "( 1. * np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value) - 1.) 
*", "res = self.combine_new_impulse() return res def compute_new_impulse(self): dual_certificate = -", "column = self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data, column) / (np.linalg.norm(column, 2)", "1. column = self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data, column) / (np.linalg.norm(column,", "class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float]", "2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity = l22_loss * self.forwardOp if", "step_size: str = 'optimal', t_max: float = None): if step_size", "decreasing: bool = False, multi_spikes_threshold: float = .7, multi_spikes: bool", "maxi self.epsilon = (1 - self.multi_spikes_threshold) * maxi else: threshold", "restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_", "not None: self.candidate_new = [] self.actual_new = [] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional,", "acceleration = None else: raise ValueError('Reweighting strategy must be in", "- (1 / (self.iter + 2)) * self.epsilon indices =", "'fista', t_max: float = None): self.remove_positions = remove_positions self.reweighting_prec =", "* restricted_forward # restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst", "{}\".format(threshold, maxi)) # print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'],", "'positions': np.array([], dtype=int)} l22_loss = (1 / 2) * SquaredL2Loss(dim=self.forwardOp.shape[0],", ".7, t_max: float = None): self.remove_positions = remove_positions self.reweighting_prec =", "1: x0 = iterand[active_indices] 
iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else:", "(1 / (self.iter + 2)) * self.epsilon indices = np.where(d", "self.actual_new = [] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose,", "super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def update_iterand(self)", "self.new_ind is not None: new_positions = np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if self.iter", "init_reweighting_prec: float = .2, decreasing: bool = False, multi_spikes_threshold: float", "accuracy: float, x0: np.ndarray = None, d: float = 75.):", "* self.last_weight x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0)", "= iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else: tmp =", "= (self.diagnostics.loc[ self.iter - 1, 'Objective Function'] - self.diagnostics.loc[ self.iter,", "x0 = np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim, active_indices, dtype=float).get_adjointOp() restricted_forward =", "bool = True, reweighting: str = 'ista', t_max: float =", "t_max: float = None): self.remove_positions = remove_positions self.reweighting_prec = reweighting_prec", "t_max=t_max) def update_iterand(self) -> Any: self.compute_new_impulse() res = self.combine_new_impulse() return", "self.new_ind]) if self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) +", "Optional[float] = 0.1, min_iter: int = 10, max_iter: int =", "self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_ * 
L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting == 'fista':", "bool = False, decreasing: bool = False, multi_spikes_threshold: float =", "multi_spikes self.multi_spikes_threshold = multi_spikes_threshold self.reweighting = reweighting self.remove_positions = remove_positions", "self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter,", "float = None): self.data = data self.forwardOp = forwardOp self.stopping_strategy", "forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max)", "1 at iteration {}'.format(self.iter)) @abstractmethod def combine_new_impulse(self) -> Any: pass", "* maxi else: threshold = maxi - (1 / (self.iter", "d: float = 75.): if x0 is None: x0 =", "self.remove_positions: active_indices = np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices = new_positions else:", "Dual ceritificate value is computed after iteration Returns ------- \"\"\"", "= 10, remove_positions: bool = False, remember_iterand: bool = False,", "import DataFrame from copy import deepcopy from abc import abstractmethod", "'relative_improvement': return abs(self.diagnostics.loc[self.iter - 1, 'Relative Improvement Objective']) elif self.stopping_strategy", "** 2) self.last_weight = iterand[active_indices] overvalue = np.abs(iterand) > self.bound", "import APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator,", "'certificate': value = self.diagnostics.loc[self.iter - 1, 'Dual Certificate Value'] return", "stopping_metric(self): if self.iter == 0: return np.infty elif self.stopping_strategy ==", "verbose is not None: self.candidate_new = [] 
self.actual_new = []", "= np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual", "2/(self.iter + 3) if not 0 < gamma < 1:", "= .2, decreasing: bool = False, multi_spikes_threshold: float = .7,", "active_indices: np.ndarray, accuracy: float, x0: np.ndarray = None, d: float", "bool = False, reweighting_prec: float = 1e-4, reweighting: str =", "= (1 - self.multi_spikes_threshold) * maxi else: threshold = maxi", "iteration self.diagnostics.loc[self.iter, 'Objective Function'] = self.objective_functional(self.iterand['iterand']) if self.iter == 0:", "= self.old_iterand['positions'] if self.iter > 0 and self.remove_positions: active_indices =", "1e-4, verbose: Optional[int] = 10, remember_iterand: bool = False, step_size:", "self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec self.decreasing = decreasing self.final_reweighting_prec", "self.reweighting_prec, x0=x0) if self.new_ind is not None: self.last_weight = iterand[self.new_ind]", "def combine_new_impulse(self) -> Any: pass def update_diagnostics(self): \"\"\" Dual ceritificate", "at iteration {}'.format(self.iter)) @abstractmethod def combine_new_impulse(self) -> Any: pass def", "self.forwardOp = forwardOp self.stopping_strategy = stopping_strategy self.accuracy_threshold = accuracy_threshold self.multi_spikes", "np.sign(self.dual_certificate_value) * self.bound return {'iterand': iterand, 'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso):", "1, 'Objective Function'] - self.diagnostics.loc[ self.iter, 'Objective Function']) / \\", "/ {}\".format(threshold, maxi)) # print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices,", "position if abs(self.dual_certificate_value) < 1.: if self.verbose is not None:", "is None: x0 = np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim, 
active_indices, dtype=float).get_adjointOp()", "# before iteration self.diagnostics.loc[self.iter, 'Objective Function'] = self.objective_functional(self.iterand['iterand']) if self.iter", "int = 10, max_iter: int = 500, stopping_strategy: str =", "ceritificate value is computed after iteration Returns ------- \"\"\" if", "SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity = l22_loss * self.forwardOp if lambda_ is", "if self.reweighting == 'fista': acceleration = 'CD' tau = None", "\"\"\" Dual ceritificate value is computed after iteration Returns -------", "if self.iter == 0: threshold = self.multi_spikes_threshold * maxi self.epsilon", "= None, d: float = 75.): if x0 is None:", "= np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if self.verbose is not None: self.candidate_new.append(indices.shape[0])", "== 'certificate': value = self.diagnostics.loc[self.iter - 1, 'Dual Certificate Value']", "must be in [\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self, active_indices: np.ndarray, accuracy:", "* self.bound return {'iterand': iterand, 'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def", "def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if self.iter == 0: return", "is not None: new_positions = np.hstack([self.old_iterand['positions'], self.new_ind]) if self.step_size ==", "* np.sign(self.dual_certificate_value) * self.bound return {'iterand': iterand, 'positions': new_positions} class", "'Objective Function'] = self.objective_functional(self.iterand['iterand']) if self.iter == 0: self.diagnostics.loc[self.iter, 'Relative", "str = 'optimal', t_max: float = None): if step_size in", "self.reweighting_prec, x0=x0) else: tmp = np.zeros(self.dim) tmp[active_indices] = 1. 
column", "self.bound return {'iterand': iterand, 'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self,", "self.new_ind = None self.epsilon = None self.remember_iterand = remember_iterand self.iterand_history", "not None: self.last_weight = iterand[self.new_ind] else: tmp = np.zeros(self.dim) tmp[active_indices]", "lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting =", "self.new_ind is not None: iterand[self.new_ind] += gamma * np.sign(self.dual_certificate_value) *", "self.init_reweighting_prec = init_reweighting_prec self.decreasing = decreasing self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso,", "iteration Returns ------- \"\"\" if self.iter == 0: self.diagnostics =", "= np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim, active_indices, dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator(", "< 1.: if self.verbose is not None: print('Warning, dual certificate", "'Dual Certificate Value'] return abs(abs(value) - 1) else: raise ValueError('Stopping", "self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity = (1 / 2) * SquaredL2Loss(dim=restricted_forward.shape[0],", "data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] = None, lambda_factor: Optional[float]", "= - self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d = np.abs(dual_certificate) if self.multi_spikes:", "Iterand'] = np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter,", "* L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting == 'fista': acceleration = 'CD' tau", "2) 
self.last_weight = iterand[active_indices] overvalue = np.abs(iterand) > self.bound if", "print(\"Threshold: {} / {}\".format(threshold, maxi)) # print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind", "** 2) overvalue = np.abs(iterand) > self.bound if overvalue.sum() >", "threshold = self.multi_spikes_threshold * maxi self.epsilon = (1 - self.multi_spikes_threshold)", "0., 1.) iterand *= (1 - gamma) if self.new_ind is", "update_iterand(self) -> Any: self.compute_new_impulse() res = self.combine_new_impulse() return res def", "Certificate Value'] return abs(abs(value) - 1) else: raise ValueError('Stopping strategy", "FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] =", "= np.abs(iterand) > self.bound if overvalue.sum() > 0: print(\"Overvalue at", "= lambda_ self.penalty = self.lambda_ * L1Norm(dim=self.dim) objective_functional = self.data_fidelity", "iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) if self.new_ind is not None:", "reweighting self.remove_positions = remove_positions self.decreasing = decreasing self.dim = self.forwardOp.shape[1]", "0: return np.infty elif self.stopping_strategy == 'relative_improvement': return abs(self.diagnostics.loc[self.iter -", "None # already present position if abs(self.dual_certificate_value) < 1.: if", "deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions = np.hstack([self.old_iterand['positions'], self.new_ind])", "Function']) self.diagnostics.loc[self.iter, 'Iter'] = self.iter if np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter,", "np.linalg.norm(self.data) ** 2 / (2 * self.lambda_) self.start = None", "2) ** 2 else: gamma = 2/(self.iter + 3) if", "accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, 
reweighting='ista', t_max=t_max) def combine_new_impulse(self): iterand", "bool = False, remember_iterand: bool = False, final_reweighting_prec: float =", "/ self.lambda_ d = np.abs(dual_certificate) if self.multi_spikes: maxi = np.max(d)", "- 1, 'Dual Certificate Value'] return abs(abs(value) - 1) else:", "computed after iteration Returns ------- \"\"\" if self.iter == 0:", "(np.abs(self.dual_certificate_value) - 1.) * self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] *", "= [] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max)", "self.old_iterand['positions'], assume_unique=True) if self.verbose is not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if", "> 1: iterand[self.new_ind] = np.sign(self.dual_certificate_value) * self.last_weight x0 = iterand[active_indices]", "res def compute_new_impulse(self): dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d", "'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator,", "self.new_ind])) if self.iter > 0 and self.remove_positions: active_indices = np.unique(np.hstack([iterand.nonzero()[0],", "= forwardOp self.stopping_strategy = stopping_strategy self.accuracy_threshold = accuracy_threshold self.multi_spikes =", "'fista': acceleration = 'CD' tau = None elif self.reweighting ==", "= 1 / lambda_factor self.new_ind = None self.epsilon = None", "np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value) - 1.) 
* self.bound) gamma /=", "* np.sign( self.dual_certificate_value) - self.forwardOp @ iterand, 2) ** 2", "self.new_ind = None self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(), key=abs) else: self.new_ind", "lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista',", "dtype=int)} l22_loss = (1 / 2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity", "self.multi_spikes: maxi = np.max(d) if self.iter == 0: threshold =", "1e-4, verbose: Optional[int] = 10, remember_iterand: bool = False, remove_positions:", "iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions =", "10, remove_positions: bool = False, remember_iterand: bool = False, final_reweighting_prec:", "= .7, multi_spikes: bool = True, reweighting: str = 'ista',", "/ lambda_factor self.new_ind = None self.epsilon = None self.remember_iterand =", "overvalue = np.abs(iterand) > self.bound if overvalue.sum() > 0: print(\"Overvalue", "column) / (np.linalg.norm(column, 2) ** 2) overvalue = np.abs(iterand) >", "self.diagnostics.loc[self.iter, 'Objective Function'] = self.objective_functional(self.iterand['iterand']) if self.iter == 0: self.diagnostics.loc[self.iter,", "= self.objective_functional(self.iterand['iterand']) if self.iter == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Objective']", "if self.iter > 0 and self.remove_positions: active_indices = np.unique(iterand.nonzero()[0]) else:", "overvalue.sum() > 0: print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue])", "/ 2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity = l22_loss * self.forwardOp", "x0=x0, tau=tau, 
acceleration=acceleration, verbose=None, accuracy_threshold=accuracy, d=d, max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand'])", "gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * np.linalg.norm(iterand, 1) gamma", "return np.infty elif self.stopping_strategy == 'relative_improvement': return abs(self.diagnostics.loc[self.iter - 1,", "remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand'])", "Function'] = self.objective_functional(self.iterand['iterand']) if self.iter == 0: self.diagnostics.loc[self.iter, 'Relative Improvement", "float = 1e-4, verbose: Optional[int] = 10, remove_positions: bool =", "data=self.data) self.data_fidelity = l22_loss * self.forwardOp if lambda_ is None:", "import TimedGenericIterativeAlgorithm import pycsou.core as pcore import pycsou.linop as pl", "= dual_certificate[self.new_ind] if self.new_ind in self.old_iterand['positions']: self.new_ind = None #", "update_diagnostics(self): \"\"\" Dual ceritificate value is computed after iteration Returns", "'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * np.linalg.norm(iterand, 1)", "self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data, column) / (np.linalg.norm(column, 2) ** 2)", "in practice print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) *", "np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim, active_indices, dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:,", "== 'ista': tau = 1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration = None", "'certificate', accuracy_threshold: float = 1e-4, verbose: Optional[int] = 10, remove_positions:", "self.candidate_new = [] self.actual_new = [] super(GenericFWSolverForLasso, 
self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter,", "def update_diagnostics(self): \"\"\" Dual ceritificate value is computed after iteration", "Value'] return abs(abs(value) - 1) else: raise ValueError('Stopping strategy must", "2) overvalue = np.abs(iterand) > self.bound if overvalue.sum() > 0:", "if step_size in ['optimal', 'regular']: self.step_size = step_size else: raise", "'Relative Improvement Objective']) elif self.stopping_strategy == 'certificate': value = self.diagnostics.loc[self.iter", "iterand, 2) ** 2 else: gamma = 2/(self.iter + 3)", "gamma /= np.linalg.norm(self.forwardOp @ iterand, 2) ** 2 else: gamma", "typing import Optional, Any from pandas import DataFrame from copy", "pycsou.opt.proxalgs import APGD class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data: np.ndarray, forwardOp:", "iterand[active_indices] = np.dot(self.data, column) / (np.linalg.norm(column, 2) ** 2) overvalue", "iterand[self.new_ind] += gamma * np.sign(self.dual_certificate_value) * self.bound return {'iterand': iterand,", "L1Norm(dim=self.dim) objective_functional = self.data_fidelity + self.penalty self.bound = np.linalg.norm(self.data) **", "self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if self.verbose is not None:", "be in [\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self, active_indices: np.ndarray, accuracy: float,", "== 0: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] = np.infty else: self.diagnostics.loc[self.iter,", "== 0: self.new_ind = None self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(), key=abs)", "Optional, Any from pandas import DataFrame from copy import deepcopy", "tau = None elif self.reweighting == 'ista': tau = 1.9", "= None elif self.reweighting == 'ista': tau = 1.9 /", "verbose: Optional[int] = 10, remove_positions: bool = False, remember_iterand: 
bool", "if self.new_ind is not None: iterand[self.new_ind] += gamma * np.sign(self.dual_certificate_value)", "= 500, stopping_strategy: str = 'certificate', accuracy_threshold: float = 1e-4,", "= np.dot(self.data, column) / (np.linalg.norm(column, 2) ** 2) overvalue =", "* L1Norm(dim=self.dim) objective_functional = self.data_fidelity + self.penalty self.bound = np.linalg.norm(self.data)", "= iterand[self.new_ind] else: tmp = np.zeros(self.dim) tmp[active_indices] = 1. column", "self.x0 = np.zeros(self.dim) self.dual_certificate_value = 1 / lambda_factor self.new_ind =", "def compute_new_impulse(self): dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d =", "# restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1]) if", "1) + (np.abs(self.dual_certificate_value) - 1.) * self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:,", "/= np.linalg.norm(self.forwardOp @ iterand, 2) ** 2 else: gamma =", "False, multi_spikes_threshold: float = .7, multi_spikes: bool = True, reweighting:", "class VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float]", "self.remove_positions = remove_positions self.decreasing = decreasing self.dim = self.forwardOp.shape[1] self.x0", "== 0: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.infty else: self.diagnostics.loc[self.iter,", "already present position if abs(self.dual_certificate_value) < 1.: if self.verbose is", "return res def compute_new_impulse(self): dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_", "1) else: raise ValueError('Stopping strategy must be in [\"relative_improvement\", \"certificate\"]')", "abs(self.diagnostics.loc[self.iter - 1, 'Relative Improvement Objective']) elif self.stopping_strategy == 
'certificate':", "Returns ------- \"\"\" if self.iter == 0: self.diagnostics = DataFrame(", "= False, remove_positions: bool = False, reweighting_prec: float = 1e-4,", "remove_positions self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec = init_reweighting_prec self.decreasing = decreasing", "remember_iterand: bool = False, decreasing: bool = False, multi_spikes_threshold: float", "# restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst # restricted_data_fidelity.diff_lipschitz_cst = self.data_fidelity.diff_lipschitz_cst restricted_regularization =", "iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) if self.new_ind is not", "columns=['Iter', 'Relative Improvement Objective', 'Relative Improvement Iterand', 'Dual Certificate Value',", "objective_functional = self.data_fidelity + self.penalty self.bound = np.linalg.norm(self.data) ** 2", "and self.remove_positions: active_indices = np.unique(iterand.nonzero()[0]) else: active_indices = new_positions if", "remember_iterand: bool = False, step_size: str = 'optimal', t_max: float", "self.decreasing = decreasing self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_,", "= step_size else: raise ValueError(\"Step size strategy must be in", "is not None: print('Warning, dual certificate lower than 1 at", "np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind] if self.new_ind in self.old_iterand['positions']: self.new_ind =", "if np.linalg.norm(self.old_iterand['iterand']) == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.infty", "in [\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self, active_indices: np.ndarray, accuracy: float, x0:", "accuracy_threshold=accuracy, d=d, max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand']) class 
VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self,", "self.bound = np.linalg.norm(self.data) ** 2 / (2 * self.lambda_) self.start", "-> Any: self.compute_new_impulse() res = self.combine_new_impulse() return res def compute_new_impulse(self):", "np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * ( 1. * np.linalg.norm(iterand, 1)", "self.iter - 1, 'Objective Function'] - self.diagnostics.loc[ self.iter, 'Objective Function'])", "== 'fista': acceleration = 'CD' tau = None elif self.reweighting", "'Dual Certificate Value', 'Objective Function']) self.diagnostics.loc[self.iter, 'Iter'] = self.iter if", "key=abs) else: self.new_ind = np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind] if self.new_ind", "Any from pandas import DataFrame from copy import deepcopy from", "= self.data_fidelity.diff_lipschitz_cst restricted_regularization = self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting ==", "False, remove_positions: bool = False, reweighting_prec: float = 1e-4, reweighting:", "+ 3) else: new_positions = self.old_iterand['positions'] if self.step_size == 'optimal':", "== 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * (", "Any: iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions", "else: active_indices = new_positions if active_indices.shape[0] > 1: iterand[self.new_ind] =", "def update_iterand(self) -> Any: self.compute_new_impulse() res = self.combine_new_impulse() return res", "[\"fista\", \"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration,", "lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_ self.penalty = self.lambda_ *", "G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration, verbose=None, accuracy_threshold=accuracy, d=d, 
max_iter=2000, min_iter=1) return", "pl from pycsou.func.penalty import L1Norm from pycsou.func.loss import SquaredL2Loss from", "np.where(d > max(threshold, 1.))[0] # print(\"Threshold: {} / {}\".format(threshold, maxi))", "- gamma) if self.new_ind is not None: iterand[self.new_ind] += gamma", "stopping_strategy self.accuracy_threshold = accuracy_threshold self.multi_spikes = multi_spikes self.multi_spikes_threshold = multi_spikes_threshold", "final_reweighting_prec: float = 1e-4, init_reweighting_prec: float = .2, decreasing: bool", "lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold,", "class GenericFWSolverForLasso(TimedGenericIterativeAlgorithm): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float]", "np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate Value'] = self.dual_certificate_value # before", "in ['optimal', 'regular']: self.step_size = step_size else: raise ValueError(\"Step size", "iterand) + self.lambda_ * np.linalg.norm(iterand, 1) gamma /= np.linalg.norm(self.forwardOp @", "self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if self.iter == 0:", "= np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.linalg.norm( self.old_iterand['iterand']", "active_indices.shape[0] > 1: x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec,", "DataFrame from copy import deepcopy from abc import abstractmethod from", "= (1 / 2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity = l22_loss", "init_iterand=init_iterand, max_iter=max_iter, 
min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def update_iterand(self) -> Any:", "self.combine_new_impulse() return res def compute_new_impulse(self): dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand']) /", "# already present position if abs(self.dual_certificate_value) < 1.: if self.verbose", "= None): self.remove_positions = remove_positions self.reweighting_prec = init_reweighting_prec self.init_reweighting_prec =", "abc import abstractmethod from utils import TimedGenericIterativeAlgorithm import pycsou.core as", "decreasing self.dim = self.forwardOp.shape[1] self.x0 = np.zeros(self.dim) self.dual_certificate_value = 1", "t_max=t_max) def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind", "= 10, remember_iterand: bool = False, step_size: str = 'optimal',", "new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_:", "* np.linalg.norm(iterand, 1) gamma /= np.linalg.norm(self.forwardOp @ iterand, 2) **", "pass def update_diagnostics(self): \"\"\" Dual ceritificate value is computed after", "active_indices = np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices = new_positions else: new_positions", "Improvement Objective']) elif self.stopping_strategy == 'certificate': value = self.diagnostics.loc[self.iter -", "str = 'ista', t_max: float = None): self.data = data", "* SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity = l22_loss * self.forwardOp if lambda_", "self.new_ind])) else: active_indices = new_positions else: new_positions = self.old_iterand['positions'] if", "'Relative Improvement Iterand', 'Dual Certificate Value', 'Objective Function']) self.diagnostics.loc[self.iter, 'Iter']", "+ 1) self.reweighting_prec = max(self.reweighting_prec, self.final_reweighting_prec) return {'iterand': 
iterand, 'positions':", "is not None: iterand[self.new_ind] += gamma * np.sign(self.dual_certificate_value) * self.bound", "pycsou.func.penalty import L1Norm from pycsou.func.loss import SquaredL2Loss from pycsou.opt.proxalgs import", "'Relative Improvement Iterand'] = np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand']) / np.linalg.norm(", "lambda_factor: Optional[float] = 0.1, min_iter: int = 10, max_iter: int", "> 0: print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) *", "init_reweighting_prec self.decreasing = decreasing self.final_reweighting_prec = final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp,", "def combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not None:", "triggered in practice print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue])", "from pycsou.func.penalty import L1Norm from pycsou.func.loss import SquaredL2Loss from pycsou.opt.proxalgs", "strategy must be in [\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self, active_indices: np.ndarray,", "self.last_weight = iterand[active_indices] overvalue = np.abs(iterand) > self.bound if overvalue.sum()", "at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound return {'iterand':", "2/(self.iter + 3) else: new_positions = self.old_iterand['positions'] if self.step_size ==", "be in ['optimal', 'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter,", "self.diagnostics.loc[ self.iter - 1, 'Objective Function'] if self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def", "if self.iter > 0 and self.remove_positions: active_indices = 
np.unique(np.hstack([iterand.nonzero()[0], self.new_ind]))", "new_positions else: new_positions = self.old_iterand['positions'] if self.iter > 0 and", "print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound return", "Function']) / \\ self.diagnostics.loc[ self.iter - 1, 'Objective Function'] if", "iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound if self.decreasing: self.reweighting_prec = self.init_reweighting_prec", "= remove_positions self.decreasing = decreasing self.dim = self.forwardOp.shape[1] self.x0 =", "self.diagnostics.loc[self.iter - 1, 'Dual Certificate Value'] return abs(abs(value) - 1)", "remove_positions self.decreasing = decreasing self.dim = self.forwardOp.shape[1] self.x0 = np.zeros(self.dim)", "new_positions if active_indices.shape[0] > 1: iterand[self.new_ind] = np.sign(self.dual_certificate_value) * self.last_weight", "gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) + self.lambda_ * ( 1. 
*", "lambda_factor self.new_ind = None self.epsilon = None self.remember_iterand = remember_iterand", "= 'CD' tau = None elif self.reweighting == 'ista': tau", "(1 / 2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data) self.data_fidelity = l22_loss *", "# print(\"Threshold: {} / {}\".format(threshold, maxi)) # print('Candidate indices: {}\\n'.format(indices.shape))", "{'iterand': iterand, 'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray,", "combine_new_impulse(self) -> Any: pass def update_diagnostics(self): \"\"\" Dual ceritificate value", "Any: pass def update_diagnostics(self): \"\"\" Dual ceritificate value is computed", "= self.lambda_ * L1Norm(dim=self.dim) objective_functional = self.data_fidelity + self.penalty self.bound", "np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if self.verbose is not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size)", "self.diagnostics.loc[self.iter, 'Dual Certificate Value'] = self.dual_certificate_value # before iteration self.diagnostics.loc[self.iter,", "self.diagnostics = DataFrame( columns=['Iter', 'Relative Improvement Objective', 'Relative Improvement Iterand',", "final_reweighting_prec super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold,", "+ self.penalty self.bound = np.linalg.norm(self.data) ** 2 / (2 *", "bool = False, remove_positions: bool = False, reweighting_prec: float =", "'regular']\") super(VanillaFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold,", "x0=x0) if self.new_ind is not None: self.last_weight = iterand[self.new_ind] else:", 
"= None self.epsilon = None self.remember_iterand = remember_iterand self.iterand_history =", "self.dual_certificate_value = dual_certificate[self.new_ind] if self.new_ind in self.old_iterand['positions']: self.new_ind = None", "= 2/(self.iter + 3) if not 0 < gamma <", "== 0: self.diagnostics = DataFrame( columns=['Iter', 'Relative Improvement Objective', 'Relative", "return {'iterand': iterand, 'positions': new_positions} class FullyCorrectiveFWSolverForLasso(VanillaFWSolverForLasso): def __init__(self, data:", "= np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices = new_positions else: new_positions =", "None: print('Warning, dual certificate lower than 1 at iteration {}'.format(self.iter))", "Iterand', 'Dual Certificate Value', 'Objective Function']) self.diagnostics.loc[self.iter, 'Iter'] = self.iter", "= 1.9 / restricted_data_fidelity.diff_lipschitz_cst acceleration = None else: raise ValueError('Reweighting", "self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound * np.sign( self.dual_certificate_value)", "min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', t_max=t_max)", "np.ndarray, accuracy: float, x0: np.ndarray = None, d: float =", "active_indices, dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity =", "self.lambda_ * np.linalg.norm(iterand, 1) gamma /= np.linalg.norm(self.forwardOp @ iterand, 2)", "self.old_iterand['iterand'] - self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate Value']", "= max(dual_certificate.min(), dual_certificate.max(), key=abs) else: self.new_ind = np.argmax(d) 
self.dual_certificate_value =", "False, remember_iterand: bool = False, decreasing: bool = False, multi_spikes_threshold:", "d = np.abs(dual_certificate) if self.multi_spikes: maxi = np.max(d) if self.iter", "= 'optimal', t_max: float = None): if step_size in ['optimal',", "if self.new_ind is not None: new_positions = np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if", "'Dual Certificate Value'] = self.dual_certificate_value # before iteration self.diagnostics.loc[self.iter, 'Objective", "self.dim = self.forwardOp.shape[1] self.x0 = np.zeros(self.dim) self.dual_certificate_value = 1 /", "self.iter == 0: threshold = self.multi_spikes_threshold * maxi self.epsilon =", "max(dual_certificate.min(), dual_certificate.max(), key=abs) else: self.new_ind = np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind]", "Improvement Iterand'] = np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand']) / np.linalg.norm( self.old_iterand['iterand'])", "+= gamma * np.sign(self.dual_certificate_value) * self.bound return {'iterand': iterand, 'positions':", "(np.linalg.norm(column, 2) ** 2) self.last_weight = iterand[active_indices] overvalue = np.abs(iterand)", "== 'relative_improvement': return abs(self.diagnostics.loc[self.iter - 1, 'Relative Improvement Objective']) elif", "self.stopping_strategy == 'certificate': value = self.diagnostics.loc[self.iter - 1, 'Dual Certificate", "/ restricted_data_fidelity.diff_lipschitz_cst acceleration = None else: raise ValueError('Reweighting strategy must", "self.multi_spikes = multi_spikes self.multi_spikes_threshold = multi_spikes_threshold self.reweighting = reweighting self.remove_positions", "= self.lambda_ * L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting == 'fista': acceleration =", "'certificate', accuracy_threshold: float = 1e-4, verbose: Optional[int] = 10, remember_iterand:", "0 < gamma < 1: gamma = np.clip(gamma, 0., 1.)", "np.infty else: 
self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.linalg.norm( self.old_iterand['iterand'] -", "else: active_indices = new_positions else: new_positions = self.old_iterand['positions'] if self.iter", "copy import deepcopy from abc import abstractmethod from utils import", "None self.epsilon = None self.remember_iterand = remember_iterand self.iterand_history = []", "> self.bound if overvalue.sum() > 0: print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue]))", "active_indices = new_positions else: new_positions = self.old_iterand['positions'] if self.iter >", "= reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy,", "np.sign(iterand[overvalue]) * self.bound return {'iterand': iterand, 'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso):", "x0 is None: x0 = np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim, active_indices,", "self.iterand_history = [] init_iterand = {'iterand': self.x0, 'positions': np.array([], dtype=int)}", "super(PolyatomicFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose,", "None if verbose is not None: self.candidate_new = [] self.actual_new", "dual_certificate.max(), key=abs) else: self.new_ind = np.argmax(d) self.dual_certificate_value = dual_certificate[self.new_ind] if", "if len(self.new_ind) == 0: self.new_ind = None self.dual_certificate_value = max(dual_certificate.min(),", "------- \"\"\" if self.iter == 0: self.diagnostics = DataFrame( columns=['Iter',", "if self.new_ind is not None: new_positions = np.hstack([self.old_iterand['positions'], self.new_ind]) if", "- 1, 'Objective Function'] if 
self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter]))", "self.data_fidelity.gradient(self.old_iterand['iterand']) / self.lambda_ d = np.abs(dual_certificate) if self.multi_spikes: maxi =", "iteration {}'.format(self.iter)) @abstractmethod def combine_new_impulse(self) -> Any: pass def update_diagnostics(self):", "'Objective Function']) self.diagnostics.loc[self.iter, 'Iter'] = self.iter if np.linalg.norm(self.old_iterand['iterand']) == 0:", "\"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration, verbose=None,", "500, stopping_strategy: str = 'certificate', accuracy_threshold: float = 1e-4, verbose:", "[] super(GenericFWSolverForLasso, self).__init__(objective_functional=objective_functional, init_iterand=init_iterand, max_iter=max_iter, min_iter=min_iter, accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def", "never been triggered in practice print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue]", "= False, step_size: str = 'optimal', t_max: float = None):", "= 10, remember_iterand: bool = False, remove_positions: bool = False,", "2) ** 2) overvalue = np.abs(iterand) > self.bound if overvalue.sum()", "self.multi_spikes_threshold * maxi self.epsilon = (1 - self.multi_spikes_threshold) * maxi", "acceleration=acceleration, verbose=None, accuracy_threshold=accuracy, d=d, max_iter=2000, min_iter=1) return injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso):", "self.stopping_strategy = stopping_strategy self.accuracy_threshold = accuracy_threshold self.multi_spikes = multi_spikes self.multi_spikes_threshold", "= [] init_iterand = {'iterand': self.x0, 'positions': np.array([], dtype=int)} l22_loss", "verbose: Optional[int] = 10, remember_iterand: bool = 
False, remove_positions: bool", "self.compute_new_impulse() res = self.combine_new_impulse() return res def compute_new_impulse(self): dual_certificate =", "active_indices = np.unique(iterand.nonzero()[0]) else: active_indices = new_positions if active_indices.shape[0] >", "float = .7, multi_spikes: bool = True, reweighting: str =", "x0=x0) else: tmp = np.zeros(self.dim) tmp[active_indices] = 1. column =", "threshold = maxi - (1 / (self.iter + 2)) *", "self.actual_new.append(self.new_ind.size) if len(self.new_ind) == 0: self.new_ind = None self.dual_certificate_value =", "else: raise ValueError(\"Step size strategy must be in ['optimal', 'regular']\")", "self.data_fidelity + self.penalty self.bound = np.linalg.norm(self.data) ** 2 / (2", "np.sign( self.dual_certificate_value) - self.forwardOp @ iterand, 2) ** 2 else:", "t_max: float = None): if step_size in ['optimal', 'regular']: self.step_size", "= None): self.data = data self.forwardOp = forwardOp self.stopping_strategy =", "lambda_ is None: lambda_ = lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ =", "multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', t_max=t_max) def combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind", "multi_spikes=False, t_max=t_max) def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand']) if", "than 1 at iteration {}'.format(self.iter)) @abstractmethod def combine_new_impulse(self) -> Any:", "gamma * np.sign(self.dual_certificate_value) * self.bound return {'iterand': iterand, 'positions': new_positions}", "self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand,", "self.iter > 0 and self.remove_positions: active_indices = np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else:", 
"new_positions = np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if self.iter > 0 and self.remove_positions:", "combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not None: new_positions", "Iterand'] = np.infty else: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.linalg.norm(", "SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\ * restricted_forward # restricted_data_fidelity.lipschitz_cst = self.data_fidelity.lipschitz_cst #", "L1Norm(dim=restricted_data_fidelity.shape[1]) if self.reweighting == 'fista': acceleration = 'CD' tau =", "iterand, 'positions': new_positions} class PolyatomicFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp:", "= 1. column = self.forwardOp(tmp) iterand[active_indices] = np.dot(self.data, column) /", "value = self.diagnostics.loc[self.iter - 1, 'Dual Certificate Value'] return abs(abs(value)", "self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) else: tmp = np.zeros(self.dim) tmp[active_indices] = 1.", "\"\"\" if self.iter == 0: self.diagnostics = DataFrame( columns=['Iter', 'Relative", "None else: raise ValueError('Reweighting strategy must be in [\"fista\", \"ista\"]')", "column) / (np.linalg.norm(column, 2) ** 2) self.last_weight = iterand[active_indices] overvalue", "/ np.linalg.norm( self.old_iterand['iterand']) self.diagnostics.loc[self.iter, 'Dual Certificate Value'] = self.dual_certificate_value #", "accuracy_threshold: float = 1e-4, verbose: Optional[int] = 10, remove_positions: bool", "- self.multi_spikes_threshold) * maxi else: threshold = maxi - (1", ".7, multi_spikes: bool = True, reweighting: str = 'ista', t_max:", "# print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if", "None: x0 = np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim, 
active_indices, dtype=float).get_adjointOp() restricted_forward", "self.last_weight = self.bound def combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand'])", "remove_positions self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter,", "accuracy_threshold self.multi_spikes = multi_spikes self.multi_spikes_threshold = multi_spikes_threshold self.reweighting = reweighting", "> self.bound if overvalue.sum() > 0: #Sanity check, never been", "verbose: Optional[int] = 10, remember_iterand: bool = False, step_size: str", "pl.sampling.SubSampling(self.dim, active_indices, dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity", "print('Warning, dual certificate lower than 1 at iteration {}'.format(self.iter)) @abstractmethod", "self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0) if self.new_ind is not None: self.last_weight =", "self.lambda_) self.start = None if verbose is not None: self.candidate_new", "self.forwardOp @ iterand, 2) ** 2 else: gamma = 2/(self.iter", "np.array([], dtype=int)} l22_loss = (1 / 2) * SquaredL2Loss(dim=self.forwardOp.shape[0], data=self.data)", "None self.remember_iterand = remember_iterand self.iterand_history = [] init_iterand = {'iterand':", "np.sign(self.dual_certificate_value) * self.last_weight x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec,", "float = None): self.remove_positions = remove_positions self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso,", "2) ** 2) self.last_weight = iterand[active_indices] overvalue = np.abs(iterand) >", "- 1, 'Relative Improvement Objective']) elif self.stopping_strategy == 'certificate': value", "* 
np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_ self.penalty = self.lambda_ * L1Norm(dim=self.dim)", "= np.unique(np.hstack([self.old_iterand['positions'], self.new_ind])) if self.iter > 0 and self.remove_positions: active_indices", "self.lambda_ = lambda_ self.penalty = self.lambda_ * L1Norm(dim=self.dim) objective_functional =", "None self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(), key=abs) else: self.new_ind = np.argmax(d)", "from copy import deepcopy from abc import abstractmethod from utils", "import numpy as np from typing import Optional, Any from", "self.verbose is not None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind) == 0:", "abs(self.dual_certificate_value) < 1.: if self.verbose is not None: print('Warning, dual", "min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, t_max=t_max) self.reweighting = reweighting", "def restricted_support_lasso(self, active_indices: np.ndarray, accuracy: float, x0: np.ndarray = None,", "not None: print('Warning, dual certificate lower than 1 at iteration", "2 else: gamma = 2/(self.iter + 3) if not 0", "= self.combine_new_impulse() return res def compute_new_impulse(self): dual_certificate = - self.data_fidelity.gradient(self.old_iterand['iterand'])", "restricted_support_lasso(self, active_indices: np.ndarray, accuracy: float, x0: np.ndarray = None, d:", "maxi)) # print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True)", "float = 1e-4, init_reweighting_prec: float = .2, decreasing: bool =", "= lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_ self.penalty = self.lambda_", "= self.multi_spikes_threshold * maxi self.epsilon = (1 - self.multi_spikes_threshold) *", "reweighting: 
str = 'fista', t_max: float = None): self.remove_positions =", "= np.zeros(self.dim) tmp[active_indices] = 1. column = self.forwardOp(tmp) iterand[active_indices] =", "else: self.diagnostics.loc[self.iter, 'Relative Improvement Iterand'] = np.linalg.norm( self.old_iterand['iterand'] - self.iterand['iterand'])", "Function'] - self.diagnostics.loc[ self.iter, 'Objective Function']) / \\ self.diagnostics.loc[ self.iter", "lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def", "= multi_spikes_threshold self.reweighting = reweighting self.remove_positions = remove_positions self.decreasing =", "-> Any: iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not None:", "multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', t_max=t_max) def combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand']) if", "active_indices = new_positions if active_indices.shape[0] > 1: x0 = iterand[active_indices]", "ValueError('Reweighting strategy must be in [\"fista\", \"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1],", "super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_, lambda_factor=lambda_factor, min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose,", "self.remove_positions = remove_positions self.reweighting_prec = reweighting_prec super(FullyCorrectiveFWSolverForLasso, self).__init__(data, forwardOp, lambda_=lambda_,", "<gh_stars>0 import numpy as np from typing import Optional, Any", "self.bound if overvalue.sum() > 0: #Sanity check, never been triggered", "VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator, lambda_: Optional[float] =", "> 0 and 
self.remove_positions: active_indices = np.unique(iterand.nonzero()[0]) else: active_indices =", "= 'certificate', accuracy_threshold: float = 1e-4, verbose: Optional[int] = 10,", "- 1, 'Objective Function'] - self.diagnostics.loc[ self.iter, 'Objective Function']) /", "np.abs(dual_certificate) if self.multi_spikes: maxi = np.max(d) if self.iter == 0:", "float, x0: np.ndarray = None, d: float = 75.): if", "restricted_forward.compute_lipschitz_cst(tol=1e-3) restricted_data_fidelity = (1 / 2) * SquaredL2Loss(dim=restricted_forward.shape[0], data=self.data) \\", "self.data_fidelity = l22_loss * self.forwardOp if lambda_ is None: lambda_", "{}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if self.verbose is not", "= None # already present position if abs(self.dual_certificate_value) < 1.:", "Any: self.compute_new_impulse() res = self.combine_new_impulse() return res def compute_new_impulse(self): dual_certificate", "in [\"fista\", \"ista\"]') solver = APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau,", "step_size in ['optimal', 'regular']: self.step_size = step_size else: raise ValueError(\"Step", "self.iter == 0: self.diagnostics = DataFrame( columns=['Iter', 'Relative Improvement Objective',", "reweighting='ista', t_max=t_max) def combine_new_impulse(self): iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is", "dual certificate lower than 1 at iteration {}'.format(self.iter)) @abstractmethod def", "min_iter=min_iter, max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def combine_new_impulse(self)", "combine_new_impulse(self) -> Any: iterand = deepcopy(self.old_iterand['iterand']) if self.new_ind is not", "None: new_positions = np.unique(np.hstack([self.old_iterand['positions'], 
self.new_ind])) if self.iter > 0 and", "+ self.lambda_ * np.linalg.norm(iterand, 1) gamma /= np.linalg.norm(self.forwardOp @ iterand,", "self.bound if self.decreasing: self.reweighting_prec = self.init_reweighting_prec / (self.iter + 1)", "None, d: float = 75.): if x0 is None: x0", "False, reweighting_prec: float = 1e-4, reweighting: str = 'fista', t_max:", "/= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound * np.sign( self.dual_certificate_value) - self.forwardOp", "max_iter=max_iter, stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=True, multi_spikes_threshold=multi_spikes_threshold, reweighting='ista', t_max=t_max) def", "None: self.candidate_new.append(indices.shape[0]) self.actual_new.append(self.new_ind.size) if len(self.new_ind) == 0: self.new_ind = None", "self.diagnostics.loc[ self.iter, 'Objective Function']) / \\ self.diagnostics.loc[ self.iter - 1,", "if x0 is None: x0 = np.zeros(active_indices.shape) injection = pl.sampling.SubSampling(self.dim,", "self.new_ind in self.old_iterand['positions']: self.new_ind = None # already present position", "else: tmp = np.zeros(self.dim) tmp[active_indices] = 1. 
column = self.forwardOp(tmp)", "= .7, t_max: float = None): self.remove_positions = remove_positions self.reweighting_prec", "0: print(\"Overvalue at coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound", "> 1: x0 = iterand[active_indices] iterand = self.restricted_support_lasso(active_indices, self.reweighting_prec, x0=x0)", "@abstractmethod def combine_new_impulse(self) -> Any: pass def update_diagnostics(self): \"\"\" Dual", "np.infty elif self.stopping_strategy == 'relative_improvement': return abs(self.diagnostics.loc[self.iter - 1, 'Relative", "APGD(dim=restricted_data_fidelity.shape[1], F=restricted_data_fidelity, G=restricted_regularization, x0=x0, tau=tau, acceleration=acceleration, verbose=None, accuracy_threshold=accuracy, d=d, max_iter=2000,", "(self.iter + 1) self.reweighting_prec = max(self.reweighting_prec, self.final_reweighting_prec) return {'iterand': iterand,", "accuracy_threshold=accuracy_threshold, verbose=verbose, t_max=t_max) def update_iterand(self) -> Any: self.compute_new_impulse() res =", "stopping_strategy=stopping_strategy, accuracy_threshold=accuracy_threshold, verbose=verbose, remember_iterand=remember_iterand, multi_spikes=False, t_max=t_max) def combine_new_impulse(self) -> Any:", "self.epsilon = None self.remember_iterand = remember_iterand self.iterand_history = [] init_iterand", "None): self.data = data self.forwardOp = forwardOp self.stopping_strategy = stopping_strategy", "before iteration self.diagnostics.loc[self.iter, 'Objective Function'] = self.objective_functional(self.iterand['iterand']) if self.iter ==", "if self.decreasing: self.reweighting_prec = self.init_reweighting_prec / (self.iter + 1) self.reweighting_prec", "self.epsilon = (1 - self.multi_spikes_threshold) * maxi else: threshold =", "- 1.) 
* self.bound) gamma /= np.linalg.norm(self.forwardOp.mat[:, self.new_ind] * self.bound", "overvalue.sum() > 0: #Sanity check, never been triggered in practice", "/ (self.iter + 1) self.reweighting_prec = max(self.reweighting_prec, self.final_reweighting_prec) return {'iterand':", "pycsou.core as pcore import pycsou.linop as pl from pycsou.func.penalty import", "= decreasing self.dim = self.forwardOp.shape[1] self.x0 = np.zeros(self.dim) self.dual_certificate_value =", "== 0: threshold = self.multi_spikes_threshold * maxi self.epsilon = (1", "1. * np.linalg.norm(iterand, 1) + (np.abs(self.dual_certificate_value) - 1.) * self.bound)", "= multi_spikes self.multi_spikes_threshold = multi_spikes_threshold self.reweighting = reweighting self.remove_positions =", "(self.iter + 2)) * self.epsilon indices = np.where(d > max(threshold,", "(self.diagnostics.loc[ self.iter - 1, 'Objective Function'] - self.diagnostics.loc[ self.iter, 'Objective", "indices: {}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if self.verbose is", "np.sign(iterand[overvalue]) * self.bound if self.decreasing: self.reweighting_prec = self.init_reweighting_prec / (self.iter", "coordinates {}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound if self.decreasing: self.reweighting_prec", "1e-4, init_reweighting_prec: float = .2, decreasing: bool = False, multi_spikes_threshold:", "init_iterand = {'iterand': self.x0, 'positions': np.array([], dtype=int)} l22_loss = (1", "ValueError('Stopping strategy must be in [\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self, active_indices:", "float = .7, t_max: float = None): self.remove_positions = remove_positions", "self.dual_certificate_value = max(dual_certificate.min(), dual_certificate.max(), key=abs) else: self.new_ind = np.argmax(d) self.dual_certificate_value", "'Objective Function'] - 
self.diagnostics.loc[ self.iter, 'Objective Function']) / \\ self.diagnostics.loc[", "return injection(solver.iterate()[0]['iterand']) class VanillaFWSolverForLasso(GenericFWSolverForLasso): def __init__(self, data: np.ndarray, forwardOp: pcore.linop.LinearOperator,", "= None): if step_size in ['optimal', 'regular']: self.step_size = step_size", "self.decreasing = decreasing self.dim = self.forwardOp.shape[1] self.x0 = np.zeros(self.dim) self.dual_certificate_value", "print('Candidate indices: {}\\n'.format(indices.shape)) self.new_ind = np.setdiff1d(indices, self.old_iterand['positions'], assume_unique=True) if self.verbose", "from utils import TimedGenericIterativeAlgorithm import pycsou.core as pcore import pycsou.linop", "{}\".format(np.arange(overvalue.shape[0])[overvalue])) iterand[overvalue] = np.sign(iterand[overvalue]) * self.bound return {'iterand': iterand, 'positions':", "self.old_iterand['positions'] if self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand) +", "data self.forwardOp = forwardOp self.stopping_strategy = stopping_strategy self.accuracy_threshold = accuracy_threshold", ".2, decreasing: bool = False, multi_spikes_threshold: float = .7, t_max:", "= pl.sampling.SubSampling(self.dim, active_indices, dtype=float).get_adjointOp() restricted_forward = pl.DenseLinearOperator( self.forwardOp.mat[:, active_indices]) restricted_forward.compute_lipschitz_cst(tol=1e-3)", "[\"relative_improvement\", \"certificate\"]') def restricted_support_lasso(self, active_indices: np.ndarray, accuracy: float, x0: np.ndarray", "pycsou.linop as pl from pycsou.func.penalty import L1Norm from pycsou.func.loss import", "0 and self.remove_positions: active_indices = np.unique(np.hstack([iterand.nonzero()[0], self.new_ind])) else: active_indices =", "TimedGenericIterativeAlgorithm import pycsou.core as pcore import pycsou.linop as pl from", "10, remove_positions: bool = False, remember_iterand: bool = False, decreasing:", 
"reweighting_prec: float = 1e-4, reweighting: str = 'fista', t_max: float", "self.objective_functional(self.iterand['iterand']) if self.iter == 0: self.diagnostics.loc[self.iter, 'Relative Improvement Objective'] =", "not None: iterand[self.new_ind] += gamma * np.sign(self.dual_certificate_value) * self.bound return", "= data self.forwardOp = forwardOp self.stopping_strategy = stopping_strategy self.accuracy_threshold =", "['optimal', 'regular']: self.step_size = step_size else: raise ValueError(\"Step size strategy", "lambda_ = lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_ self.penalty =", "Certificate Value'] = self.dual_certificate_value # before iteration self.diagnostics.loc[self.iter, 'Objective Function']", "is None: lambda_ = lambda_factor * np.abs(self.forwardOp.adjoint(self.data)).max() self.lambda_ = lambda_", "self.remember_iterand: self.iterand_history.append(self.iterand['iterand']) def print_diagnostics(self): print(dict(self.diagnostics.loc[self.iter])) def stopping_metric(self): if self.iter ==", "= None self.remember_iterand = remember_iterand self.iterand_history = [] init_iterand =", "self.forwardOp.shape[1] self.x0 = np.zeros(self.dim) self.dual_certificate_value = 1 / lambda_factor self.new_ind", "= self.old_iterand['positions'] if self.step_size == 'optimal': gamma = np.dot(self.data_fidelity.gradient(iterand), iterand)", "Optional[int] = 10, remember_iterand: bool = False, remove_positions: bool =", "in self.old_iterand['positions']: self.new_ind = None # already present position if", "pandas import DataFrame from copy import deepcopy from abc import" ]
[ "g.serialize(format='trix') g3 = ConjunctiveGraph() from StringIO import StringIO g3.parse(StringIO(r), format='trix')", "r2 = URIRef('resource:2') label = URIRef('predicate:label') g1 = Graph(identifier =", "isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else: # BNode, this is a", "# TODO: Fix once getGraph/getContext is in conjunctive graph if", "unittest from rdflib.graph import ConjunctiveGraph from rdflib.term import URIRef, Literal", "from rdflib.term import URIRef, Literal from rdflib.graph import Graph class", "URIRef('store:1') r1 = URIRef('resource:1') r2 = URIRef('resource:2') label = URIRef('predicate:label')", "URIRef('resource:3') g.add((r3, label, Literal(4))) r = g.serialize(format='trix') g3 = ConjunctiveGraph()", "import Graph class TestTrixSerialize(unittest.TestCase): def setUp(self): pass def tearDown(self): pass", "None, None)): g.addN([(s,p,o,g1)]) for s,p,o in g2.triples((None, None, None)): g.addN([(s,p,o,g2)])", "r3 = URIRef('resource:3') g.add((r3, label, Literal(4))) r = g.serialize(format='trix') g3", "None)): g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3') g.add((r3, label, Literal(4))) r =", "= ConjunctiveGraph() from StringIO import StringIO g3.parse(StringIO(r), format='trix') for q", "and that is the default one, but this is not", "None, None)): g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3') g.add((r3, label, Literal(4))) r", "s,p,o in g1.triples((None, None, None)): g.addN([(s,p,o,g1)]) for s,p,o in g2.triples((None,", "bit ugly # we cannot match the bnode to the", "2\"))) s2 = URIRef('store:2') g2 = Graph(identifier = s2) g2.add((r2,", "Graph(identifier = s1) g1.add((r1, label, Literal(\"label 1\", lang=\"en\"))) g1.add((r1, label,", "g3.quads((None,None,None)): # TODO: Fix once getGraph/getContext is in conjunctive graph", "ConjunctiveGraph() from StringIO import StringIO g3.parse(StringIO(r), format='trix') for q in", "once getGraph/getContext is in conjunctive graph if 
isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store,", "this is a bit ugly # we cannot match the", "is not always the case tg=g.default_context self.assertTrue(q[0:3] in tg) if", "ConjunctiveGraph() for s,p,o in g1.triples((None, None, None)): g.addN([(s,p,o,g1)]) for s,p,o", "# here I know there is only one anonymous graph,", "pass def testSerialize(self): s1 = URIRef('store:1') r1 = URIRef('resource:1') r2", "= ConjunctiveGraph() for s,p,o in g1.triples((None, None, None)): g.addN([(s,p,o,g1)]) for", "graph automagically # here I know there is only one", "s2) g2.add((r2, label, Literal(\"label 3\"))) g = ConjunctiveGraph() for s,p,o", "know there is only one anonymous graph, # and that", "bnode to the right graph automagically # here I know", "right graph automagically # here I know there is only", "that is the default one, but this is not always", "r = g.serialize(format='trix') g3 = ConjunctiveGraph() from StringIO import StringIO", "def tearDown(self): pass def testSerialize(self): s1 = URIRef('store:1') r1 =", "match the bnode to the right graph automagically # here", "g2 = Graph(identifier = s2) g2.add((r2, label, Literal(\"label 3\"))) g", "# we cannot match the bnode to the right graph", "rdflib.graph import Graph class TestTrixSerialize(unittest.TestCase): def setUp(self): pass def tearDown(self):", "not always the case tg=g.default_context self.assertTrue(q[0:3] in tg) if __name__=='__main__':", "= g.serialize(format='trix') g3 = ConjunctiveGraph() from StringIO import StringIO g3.parse(StringIO(r),", "tg=Graph(store=g.store, identifier=q[3].identifier) else: # BNode, this is a bit ugly", "g.addN([(s,p,o,g1)]) for s,p,o in g2.triples((None, None, None)): g.addN([(s,p,o,g2)]) r3 =", "URIRef, Literal from rdflib.graph import Graph class TestTrixSerialize(unittest.TestCase): def setUp(self):", "rdflib.graph import ConjunctiveGraph from rdflib.term import URIRef, Literal from rdflib.graph", "# and that is the default one, but this is", "automagically 
# here I know there is only one anonymous", "only one anonymous graph, # and that is the default", "conjunctive graph if isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else: # BNode,", "g2.triples((None, None, None)): g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3') g.add((r3, label, Literal(4)))", "label, Literal(\"label 1\", lang=\"en\"))) g1.add((r1, label, Literal(\"label 2\"))) s2 =", "testSerialize(self): s1 = URIRef('store:1') r1 = URIRef('resource:1') r2 = URIRef('resource:2')", "label = URIRef('predicate:label') g1 = Graph(identifier = s1) g1.add((r1, label,", "for s,p,o in g2.triples((None, None, None)): g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3')", "cannot match the bnode to the right graph automagically #", "ConjunctiveGraph from rdflib.term import URIRef, Literal from rdflib.graph import Graph", "label, Literal(\"label 2\"))) s2 = URIRef('store:2') g2 = Graph(identifier =", "StringIO import StringIO g3.parse(StringIO(r), format='trix') for q in g3.quads((None,None,None)): #", "= URIRef('store:2') g2 = Graph(identifier = s2) g2.add((r2, label, Literal(\"label", "= Graph(identifier = s1) g1.add((r1, label, Literal(\"label 1\", lang=\"en\"))) g1.add((r1,", "anonymous graph, # and that is the default one, but", "python import unittest from rdflib.graph import ConjunctiveGraph from rdflib.term import", "URIRef('resource:1') r2 = URIRef('resource:2') label = URIRef('predicate:label') g1 = Graph(identifier", "BNode, this is a bit ugly # we cannot match", "URIRef('resource:2') label = URIRef('predicate:label') g1 = Graph(identifier = s1) g1.add((r1,", "= Graph(identifier = s2) g2.add((r2, label, Literal(\"label 3\"))) g =", "lang=\"en\"))) g1.add((r1, label, Literal(\"label 2\"))) s2 = URIRef('store:2') g2 =", "tearDown(self): pass def testSerialize(self): s1 = URIRef('store:1') r1 = URIRef('resource:1')", "Literal(\"label 1\", lang=\"en\"))) g1.add((r1, label, Literal(\"label 2\"))) s2 = URIRef('store:2')", 
"Literal(\"label 3\"))) g = ConjunctiveGraph() for s,p,o in g1.triples((None, None,", "= URIRef('resource:3') g.add((r3, label, Literal(4))) r = g.serialize(format='trix') g3 =", "format='trix') for q in g3.quads((None,None,None)): # TODO: Fix once getGraph/getContext", "one anonymous graph, # and that is the default one,", "default one, but this is not always the case tg=g.default_context", "one, but this is not always the case tg=g.default_context self.assertTrue(q[0:3]", "import ConjunctiveGraph from rdflib.term import URIRef, Literal from rdflib.graph import", "URIRef('predicate:label') g1 = Graph(identifier = s1) g1.add((r1, label, Literal(\"label 1\",", "g3 = ConjunctiveGraph() from StringIO import StringIO g3.parse(StringIO(r), format='trix') for", "here I know there is only one anonymous graph, #", "= URIRef('resource:1') r2 = URIRef('resource:2') label = URIRef('predicate:label') g1 =", "g1.triples((None, None, None)): g.addN([(s,p,o,g1)]) for s,p,o in g2.triples((None, None, None)):", "g3.parse(StringIO(r), format='trix') for q in g3.quads((None,None,None)): # TODO: Fix once", "TODO: Fix once getGraph/getContext is in conjunctive graph if isinstance(q[3].identifier,", "s2 = URIRef('store:2') g2 = Graph(identifier = s2) g2.add((r2, label,", "always the case tg=g.default_context self.assertTrue(q[0:3] in tg) if __name__=='__main__': unittest.main()", "to the right graph automagically # here I know there", "we cannot match the bnode to the right graph automagically", "g2.add((r2, label, Literal(\"label 3\"))) g = ConjunctiveGraph() for s,p,o in", "this is not always the case tg=g.default_context self.assertTrue(q[0:3] in tg)", "the bnode to the right graph automagically # here I", "I know there is only one anonymous graph, # and", "setUp(self): pass def tearDown(self): pass def testSerialize(self): s1 = URIRef('store:1')", "= URIRef('predicate:label') g1 = Graph(identifier = s1) g1.add((r1, label, Literal(\"label", "from rdflib.graph import Graph class 
TestTrixSerialize(unittest.TestCase): def setUp(self): pass def", "import unittest from rdflib.graph import ConjunctiveGraph from rdflib.term import URIRef,", "None)): g.addN([(s,p,o,g1)]) for s,p,o in g2.triples((None, None, None)): g.addN([(s,p,o,g2)]) r3", "1\", lang=\"en\"))) g1.add((r1, label, Literal(\"label 2\"))) s2 = URIRef('store:2') g2", "the right graph automagically # here I know there is", "#!/usr/bin/env python import unittest from rdflib.graph import ConjunctiveGraph from rdflib.term", "g1.add((r1, label, Literal(\"label 1\", lang=\"en\"))) g1.add((r1, label, Literal(\"label 2\"))) s2", "label, Literal(4))) r = g.serialize(format='trix') g3 = ConjunctiveGraph() from StringIO", "graph if isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else: # BNode, this", "URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else: # BNode, this is a bit", "g.add((r3, label, Literal(4))) r = g.serialize(format='trix') g3 = ConjunctiveGraph() from", "for q in g3.quads((None,None,None)): # TODO: Fix once getGraph/getContext is", "Graph(identifier = s2) g2.add((r2, label, Literal(\"label 3\"))) g = ConjunctiveGraph()", "identifier=q[3].identifier) else: # BNode, this is a bit ugly #", "is in conjunctive graph if isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else:", "s,p,o in g2.triples((None, None, None)): g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3') g.add((r3,", "URIRef('store:2') g2 = Graph(identifier = s2) g2.add((r2, label, Literal(\"label 3\")))", "graph, # and that is the default one, but this", "= s2) g2.add((r2, label, Literal(\"label 3\"))) g = ConjunctiveGraph() for", "Fix once getGraph/getContext is in conjunctive graph if isinstance(q[3].identifier, URIRef):", "StringIO g3.parse(StringIO(r), format='trix') for q in g3.quads((None,None,None)): # TODO: Fix", "pass def tearDown(self): pass def testSerialize(self): s1 = URIRef('store:1') r1", "import URIRef, Literal 
from rdflib.graph import Graph class TestTrixSerialize(unittest.TestCase): def", "in g2.triples((None, None, None)): g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3') g.add((r3, label,", "in conjunctive graph if isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else: #", "def setUp(self): pass def tearDown(self): pass def testSerialize(self): s1 =", "s1 = URIRef('store:1') r1 = URIRef('resource:1') r2 = URIRef('resource:2') label", "Literal from rdflib.graph import Graph class TestTrixSerialize(unittest.TestCase): def setUp(self): pass", "s1) g1.add((r1, label, Literal(\"label 1\", lang=\"en\"))) g1.add((r1, label, Literal(\"label 2\")))", "rdflib.term import URIRef, Literal from rdflib.graph import Graph class TestTrixSerialize(unittest.TestCase):", "label, Literal(\"label 3\"))) g = ConjunctiveGraph() for s,p,o in g1.triples((None,", "a bit ugly # we cannot match the bnode to", "there is only one anonymous graph, # and that is", "but this is not always the case tg=g.default_context self.assertTrue(q[0:3] in", "in g1.triples((None, None, None)): g.addN([(s,p,o,g1)]) for s,p,o in g2.triples((None, None,", "g1.add((r1, label, Literal(\"label 2\"))) s2 = URIRef('store:2') g2 = Graph(identifier", "# BNode, this is a bit ugly # we cannot", "the default one, but this is not always the case", "is the default one, but this is not always the", "= s1) g1.add((r1, label, Literal(\"label 1\", lang=\"en\"))) g1.add((r1, label, Literal(\"label", "if isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier) else: # BNode, this is", "in g3.quads((None,None,None)): # TODO: Fix once getGraph/getContext is in conjunctive", "from rdflib.graph import ConjunctiveGraph from rdflib.term import URIRef, Literal from", "q in g3.quads((None,None,None)): # TODO: Fix once getGraph/getContext is in", "else: # BNode, this is a bit ugly # we", "r1 = URIRef('resource:1') r2 = URIRef('resource:2') label = URIRef('predicate:label') 
g1", "ugly # we cannot match the bnode to the right", "g1 = Graph(identifier = s1) g1.add((r1, label, Literal(\"label 1\", lang=\"en\")))", "getGraph/getContext is in conjunctive graph if isinstance(q[3].identifier, URIRef): tg=Graph(store=g.store, identifier=q[3].identifier)", "g.addN([(s,p,o,g2)]) r3 = URIRef('resource:3') g.add((r3, label, Literal(4))) r = g.serialize(format='trix')", "class TestTrixSerialize(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testSerialize(self):", "Literal(\"label 2\"))) s2 = URIRef('store:2') g2 = Graph(identifier = s2)", "def testSerialize(self): s1 = URIRef('store:1') r1 = URIRef('resource:1') r2 =", "Graph class TestTrixSerialize(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def", "import StringIO g3.parse(StringIO(r), format='trix') for q in g3.quads((None,None,None)): # TODO:", "3\"))) g = ConjunctiveGraph() for s,p,o in g1.triples((None, None, None)):", "g = ConjunctiveGraph() for s,p,o in g1.triples((None, None, None)): g.addN([(s,p,o,g1)])", "Literal(4))) r = g.serialize(format='trix') g3 = ConjunctiveGraph() from StringIO import", "is only one anonymous graph, # and that is the", "= URIRef('store:1') r1 = URIRef('resource:1') r2 = URIRef('resource:2') label =", "is a bit ugly # we cannot match the bnode", "for s,p,o in g1.triples((None, None, None)): g.addN([(s,p,o,g1)]) for s,p,o in", "from StringIO import StringIO g3.parse(StringIO(r), format='trix') for q in g3.quads((None,None,None)):", "= URIRef('resource:2') label = URIRef('predicate:label') g1 = Graph(identifier = s1)", "TestTrixSerialize(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testSerialize(self): s1" ]
[ "print \"\\033[35mRevision Updated in the database as %d\\033[0m\" % checkout_revision", "updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated in the database %s\\033[0m\" % checkout_version", "sinedon import dbupgrade, dbconfig import updatelib project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True)", "checkout version, for example, 2.1 -->') if checkout_version != 'trunk':", "= dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__ == \"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade)", "x:int(x)),checkout_version.split('.')[:2]) except: print \"valid versions are 'trunk', '2.1', or '2.1.2'", "__name__ == \"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert to", "checkout revision, for example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated", "the database %s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated in", "checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated in the database as %d\\033[0m\"", "to checkout version, for example, 2.1 -->') if checkout_version !=", "= int(raw_input('Revert to checkout revision, for example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version)", "project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__ == \"__main__\": updatelib_inst =", "#!/usr/bin/env python from sinedon import dbupgrade, dbconfig import updatelib project_dbupgrade", "print \"\\033[35mVersion Updated in the database %s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision)", "dbupgrade, dbconfig import updatelib project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__", "from sinedon import dbupgrade, dbconfig import updatelib project_dbupgrade = 
dbupgrade.DBUpgradeTools('projectdata',", "map((lambda x:int(x)),checkout_version.split('.')[:2]) except: print \"valid versions are 'trunk', '2.1', or", "database %s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated in the", "except: print \"valid versions are 'trunk', '2.1', or '2.1.2' etc\"", "versions are 'trunk', '2.1', or '2.1.2' etc\" raise checkout_revision =", "int(raw_input('Revert to checkout revision, for example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print", "python from sinedon import dbupgrade, dbconfig import updatelib project_dbupgrade =", "if checkout_version != 'trunk': try: map((lambda x:int(x)),checkout_version.split('.')[:2]) except: print \"valid", "checkout_version != 'trunk': try: map((lambda x:int(x)),checkout_version.split('.')[:2]) except: print \"valid versions", "dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__ == \"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version", "to checkout revision, for example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion", "raw_input('Revert to checkout version, for example, 2.1 -->') if checkout_version", "Updated in the database %s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision", "= updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert to checkout version, for example,", "'2.1.2' etc\" raise checkout_revision = int(raw_input('Revert to checkout revision, for", "drop=True) if __name__ == \"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version =", "checkout_revision = int(raw_input('Revert to checkout revision, for example, 16500 -->'))", "try: map((lambda x:int(x)),checkout_version.split('.')[:2]) except: print \"valid versions are 'trunk', '2.1',", "!= 'trunk': try: map((lambda 
x:int(x)),checkout_version.split('.')[:2]) except: print \"valid versions are", "-->') if checkout_version != 'trunk': try: map((lambda x:int(x)),checkout_version.split('.')[:2]) except: print", "== \"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert to checkout", "\"valid versions are 'trunk', '2.1', or '2.1.2' etc\" raise checkout_revision", "revision, for example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated in", "\"\\033[35mVersion Updated in the database %s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print", "'2.1', or '2.1.2' etc\" raise checkout_revision = int(raw_input('Revert to checkout", "import updatelib project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__ == \"__main__\":", "\"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert to checkout version,", "= raw_input('Revert to checkout version, for example, 2.1 -->') if", "or '2.1.2' etc\" raise checkout_revision = int(raw_input('Revert to checkout revision,", "in the database %s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated", "% checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated in the database as", "updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert to checkout version, for", "updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert to checkout version, for example, 2.1", "example, 2.1 -->') if checkout_version != 'trunk': try: map((lambda x:int(x)),checkout_version.split('.')[:2])", "%s\\033[0m\" % checkout_version updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated in the database", "version, for example, 2.1 -->') if checkout_version != 
'trunk': try:", "print \"valid versions are 'trunk', '2.1', or '2.1.2' etc\" raise", "example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated in the database", "updatelib_inst.updateDatabaseRevision(checkout_revision) print \"\\033[35mRevision Updated in the database as %d\\033[0m\" %", "raise checkout_revision = int(raw_input('Revert to checkout revision, for example, 16500", "are 'trunk', '2.1', or '2.1.2' etc\" raise checkout_revision = int(raw_input('Revert", "for example, 16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated in the", "16500 -->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated in the database %s\\033[0m\"", "-->')) updatelib_inst.updateDatabaseVersion(checkout_version) print \"\\033[35mVersion Updated in the database %s\\033[0m\" %", "updatelib project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__ == \"__main__\": updatelib_inst", "for example, 2.1 -->') if checkout_version != 'trunk': try: map((lambda", "checkout_version = raw_input('Revert to checkout version, for example, 2.1 -->')", "2.1 -->') if checkout_version != 'trunk': try: map((lambda x:int(x)),checkout_version.split('.')[:2]) except:", "import dbupgrade, dbconfig import updatelib project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True) if", "etc\" raise checkout_revision = int(raw_input('Revert to checkout revision, for example,", "'trunk': try: map((lambda x:int(x)),checkout_version.split('.')[:2]) except: print \"valid versions are 'trunk',", "'trunk', '2.1', or '2.1.2' etc\" raise checkout_revision = int(raw_input('Revert to", "dbconfig import updatelib project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True) if __name__ ==", "if __name__ == \"__main__\": updatelib_inst = updatelib.UpdateLib(project_dbupgrade) checkout_version = raw_input('Revert" ]
[ "from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path, C_param): X,y =", "from sklearn.linear_model import LogisticRegression from fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model from", "C=C_param, penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X, y) c_ext = '_c{:.3f}'.format(C_param) save_regression_summary(data_set_path,retain_reg,ext=c_ext)", "C_param): X,y = prepare_data(data_set_path) retain_reg = LogisticRegression( C=C_param, penalty='l1', solver='liblinear',", "fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path, C_param): X,y = prepare_data(data_set_path)", "LogisticRegression from fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary,", "prepare_data(data_set_path) retain_reg = LogisticRegression( C=C_param, penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X, y)", "save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path, C_param): X,y", "import save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path, C_param): X,y = prepare_data(data_set_path) retain_reg", "penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X, y) c_ext = '_c{:.3f}'.format(C_param) save_regression_summary(data_set_path,retain_reg,ext=c_ext) save_regression_model(data_set_path,retain_reg,ext=c_ext)", "fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import 
save_regression_summary, save_dataset_predictions def", "import prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path,", "retain_reg = LogisticRegression( C=C_param, penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X, y) c_ext", "solver='liblinear', fit_intercept=True) retain_reg.fit(X, y) c_ext = '_c{:.3f}'.format(C_param) save_regression_summary(data_set_path,retain_reg,ext=c_ext) save_regression_model(data_set_path,retain_reg,ext=c_ext) save_dataset_predictions(data_set_path,retain_reg,X,ext=c_ext)", "regression_cparam(data_set_path, C_param): X,y = prepare_data(data_set_path) retain_reg = LogisticRegression( C=C_param, penalty='l1',", "X,y = prepare_data(data_set_path) retain_reg = LogisticRegression( C=C_param, penalty='l1', solver='liblinear', fit_intercept=True)", "save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path, C_param): X,y = prepare_data(data_set_path) retain_reg =", "= LogisticRegression( C=C_param, penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X, y) c_ext =", "from fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions", "sklearn.linear_model import LogisticRegression from fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression", "def regression_cparam(data_set_path, C_param): X,y = prepare_data(data_set_path) retain_reg = LogisticRegression( C=C_param,", "= prepare_data(data_set_path) retain_reg = LogisticRegression( C=C_param, penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X,", "import LogisticRegression from 
fightchurn.listings.chap8.listing_8_2_logistic_regression import prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import", "LogisticRegression( C=C_param, penalty='l1', solver='liblinear', fit_intercept=True) retain_reg.fit(X, y) c_ext = '_c{:.3f}'.format(C_param)", "save_dataset_predictions def regression_cparam(data_set_path, C_param): X,y = prepare_data(data_set_path) retain_reg = LogisticRegression(", "prepare_data, save_regression_model from fightchurn.listings.chap8.listing_8_2_logistic_regression import save_regression_summary, save_dataset_predictions def regression_cparam(data_set_path, C_param):" ]
[ "3*6*16)) #%% [markdown] # ## ACP # Réduction de la", "l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = col)", "#%% from IPython.display import Image #%% for cl in range(0,19):", "# img = Tools.display_mosaic(image_array, nrow = 10) # fig =", "Zoom sur le cluster 0 #%% res_tmp = select_cluster(clustering, 1)", "range(0,19): print(\"Cluster %s\" % (cl)) res_tmp = select_cluster(clustering, cl) print(len(res_tmp))", "= select_cluster(clustering, cl) print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54,", "[markdown] # ## Visualisation des clusters #%% def select_cluster(clustering, id_cluster):", "IPython.display import Image #%% for cl in range(0,19): print(\"Cluster %s\"", "pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%%", "Sauvegarde des modèles #%% clustering.save() #%% # clustering = ClassicalClustering(cfg.get('clustering')['classical'])", "#%% clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%% clustering.compute_pca() #%% [markdown]", "autoreload %autoreload 2 os.chdir('/home/jovyan/work') #%% [markdown] # ## Import iss", "[Tools.read_np_picture(f, target_size = (54, 96)) for f in res_tmp[:100]] #", "= select_cluster(clustering, 1) #%% print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size =", "= Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48), batch = 496, nb_batch", "## Visualisation des clusters #%% def select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/',", "#%% clustering.save() #%% # clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown]", "## ACP # Réduction de la dimension #%% clustering =", "image_array = [Tools.read_np_picture(f, target_size = (54, 
96)) for f in", "0], clustering.pca_reduction[:, 1], c = clustering.kmeans_labels) #%% [markdown] # ###", "Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown] #", "mode = os.getenv(\"MODE\")) #%% [markdown] # ## Chargement du modèle", "= (54, 96)) for f in res_tmp] #%% Tools.display_mosaic(image_array, nrow", "# ## ACP # Réduction de la dimension #%% clustering", "#%% [markdown] # ## Chargement de la config #%% load_dotenv(find_dotenv())", "modèle #%% ## charger le modèle model_type = 'simple_conv' cfg.get('models')[model_type]['model_name']", "os #%% %load_ext autoreload %autoreload 2 os.chdir('/home/jovyan/work') #%% [markdown] #", "(54, 96)) for f in res_tmp[:100]] # img = Tools.display_mosaic(image_array,", "Tools.display_mosaic(image_array, nrow = 18) #%% col = [1 if l", "# ## Chargement de la config #%% load_dotenv(find_dotenv()) cfg =", "[markdown] # ## Chargement du modèle #%% ## charger le", "= Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48), batch", "[markdown] # ## Résultats #%% [markdown] # ### Clusters intermediaires", "cl) print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54, 96)) for", "= 10) # fig = plt.figure(1, figsize=(12, 7)) # plt.imshow(img,", "== id_cluster] #%% from IPython.display import Image #%% for cl", "ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%% clustering.compute_pca() #%% [markdown] # ## Kmeans", "96)) for f in res_tmp] #%% Tools.display_mosaic(image_array, nrow = 18)", "#%% [markdown] # ### Clusters finaux #%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:,", "#%% pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output = 
pictures_preds.reshape((pictures_preds.shape[0],", "# ### Clusters intermediaires #%% fig = plt.figure(1, figsize=(12, 7))", "= 'auto') # plt.show() #%% [markdown] # ## Zoom sur", "c = clustering.kmeans_labels) #%% [markdown] # ### Clusters finaux #%%", "= (27, 48), batch = 496, nb_batch = 10) #%%", "# Réduction de la dimension #%% clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id,", "figsize=(12, 7)) # plt.imshow(img, aspect = 'auto') # plt.show() #%%", "pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16))", "[markdown] # ## import classique import os #%% %load_ext autoreload", "col = [1 if l == 1 else 0 for", "img = Tools.display_mosaic(image_array, nrow = 10) # fig = plt.figure(1,", "from iss.tools import Config from iss.tools import Tools from iss.models", "intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown] # ## ACP #", "os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\")) #%% [markdown] # ## Chargement du", "= Tools.display_mosaic(image_array, nrow = 10) # fig = plt.figure(1, figsize=(12,", "plt.imshow(img, aspect = 'auto') # plt.show() #%% [markdown] # ##", "CAH # Seconds clusters #%% clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown] #", "from iss.clustering import ClassicalClustering from iss.clustering import AdvancedClustering from dotenv", "= clustering.kmeans_labels) #%% [markdown] # ### Clusters finaux #%% plt.scatter(clustering.pca_reduction[:,", "# ## Visualisation des clusters #%% def select_cluster(clustering, id_cluster): return", "clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown] # ## Résultats #%% [markdown] #", "Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\")) #%% [markdown] # ##", "= 18) #%% col = [1 if l == 1", "clustering.compute_pca() #%% [markdown] # ## Kmeans # Premiers 
clusters #%%", "config #%% load_dotenv(find_dotenv()) cfg = Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode =", "[markdown] # ### Sauvegarde des modèles #%% clustering.save() #%% #", "(cl)) res_tmp = select_cluster(clustering, cl) print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size", "dimension #%% clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%% clustering.compute_pca() #%%", "== 1 else 0 for l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0],", "Visualisation des clusters #%% def select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0]", "#%% res_tmp = select_cluster(clustering, 1) #%% print(len(res_tmp)) image_array = [Tools.read_np_picture(f,", "import Tools from iss.models import SimpleConvAutoEncoder from iss.clustering import ClassicalClustering", "= SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ## Chargement des images #%% filenames", "Config from iss.tools import Tools from iss.models import SimpleConvAutoEncoder from", "SimpleConvAutoEncoder from iss.clustering import ClassicalClustering from iss.clustering import AdvancedClustering from", "Résultats #%% [markdown] # ### Clusters intermediaires #%% fig =", "Clusters intermediaires #%% fig = plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0],", "clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown] # ## CAH #", "= ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown] # ## Visualisation des clusters", "de la config #%% load_dotenv(find_dotenv()) cfg = Config(project_dir = os.getenv(\"PROJECT_DIR\"),", "# ## import classique import os #%% %load_ext autoreload %autoreload", "96)) for f in res_tmp[:100]] # img = Tools.display_mosaic(image_array, nrow", "figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0], 
clustering.pca_reduction[:, 1], c = clustering.kmeans_labels) #%%", "sur le cluster 0 #%% res_tmp = select_cluster(clustering, 1) #%%", "Clustering classique #%% [markdown] # ## import classique import os", "'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ## Chargement des images", "from IPython.display import Image #%% for cl in range(0,19): print(\"Cluster", "0 #%% res_tmp = select_cluster(clustering, 1) #%% print(len(res_tmp)) image_array =", "os.chdir('/home/jovyan/work') #%% [markdown] # ## Import iss #%% from iss.tools", "iss #%% from iss.tools import Config from iss.tools import Tools", "in res_tmp] #%% Tools.display_mosaic(image_array, nrow = 18) #%% col =", "1) #%% print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54, 96))", "= 496, nb_batch = 10) #%% pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs,", "clustering.pca_reduction[:, 1], c = clustering.final_labels) #%% [markdown] # ### Sauvegarde", "ACP # Réduction de la dimension #%% clustering = ClassicalClustering(cfg.get('clustering')['classical'],", "0 for l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c", "(27, 48), batch = 496, nb_batch = 10) #%% pictures_id,", "res[2] == id_cluster] #%% from IPython.display import Image #%% for", "select_cluster(clustering, cl) print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54, 96))", "from iss.tools import Tools from iss.models import SimpleConvAutoEncoder from iss.clustering", "clustering.pca_reduction[:, 1], c = clustering.kmeans_labels) #%% [markdown] # ### Clusters", "classique import os #%% %load_ext autoreload %autoreload 2 os.chdir('/home/jovyan/work') #%%", "for res in clustering.get_zip_results() if res[2] == id_cluster] #%% from", "filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = 
Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48),", "# fig = plt.figure(1, figsize=(12, 7)) # plt.imshow(img, aspect =", "= 'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown]", "Import iss #%% from iss.tools import Config from iss.tools import", "in range(0,19): print(\"Cluster %s\" % (cl)) res_tmp = select_cluster(clustering, cl)", "[Tools.read_np_picture(f, target_size = (54, 96)) for f in res_tmp] #%%", "import os #%% %load_ext autoreload %autoreload 2 os.chdir('/home/jovyan/work') #%% [markdown]", "model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ## Chargement des images #%%", "#%% [markdown] # ## Visualisation des clusters #%% def select_cluster(clustering,", "%load_ext autoreload %autoreload 2 os.chdir('/home/jovyan/work') #%% [markdown] # ## Import", "clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown] # ## CAH # Seconds clusters", "1], c = clustering.kmeans_labels) #%% [markdown] # ### Clusters finaux", "Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48), batch = 496, nb_batch =", "Kmeans # Premiers clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown] #", "## Résultats #%% [markdown] # ### Clusters intermediaires #%% fig", "# Clustering classique #%% [markdown] # ## import classique import", "## CAH # Seconds clusters #%% clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown]", "#%% [markdown] # ## Import iss #%% from iss.tools import", "print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54, 96)) for f", "target_size = (54, 96)) for f in res_tmp[:100]] # img", "[1 if l == 1 else 0 for l in", "clustering.load() #%% [markdown] # ## Visualisation des clusters #%% def", "ClassicalClustering from iss.clustering import AdvancedClustering from dotenv import find_dotenv, load_dotenv", 
"#%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.final_labels) #%% [markdown]", "charger le modèle model_type = 'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab' model", "## Kmeans # Premiers clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown]", "res[0] + '.jpg') for res in clustering.get_zip_results() if res[2] ==", "cfg = Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\")) #%% [markdown]", "from iss.models import SimpleConvAutoEncoder from iss.clustering import ClassicalClustering from iss.clustering", "du modèle #%% ## charger le modèle model_type = 'simple_conv'", "= clustering.final_labels) #%% [markdown] # ### Sauvegarde des modèles #%%", "select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for res in", "np #%% [markdown] # ## Chargement de la config #%%", "target_size = (54, 96)) for f in res_tmp] #%% Tools.display_mosaic(image_array,", "des images #%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size", "cl in range(0,19): print(\"Cluster %s\" % (cl)) res_tmp = select_cluster(clustering,", "import find_dotenv, load_dotenv import numpy as np #%% [markdown] #", "### Clusters intermediaires #%% fig = plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:,", "#%% [markdown] # ## Résultats #%% [markdown] # ### Clusters", "import numpy as np #%% [markdown] # ## Chargement de", "classique #%% [markdown] # ## import classique import os #%%", "48), batch = 496, nb_batch = 10) #%% pictures_id, pictures_preds", "### Clusters finaux #%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c =", "iss.tools import Tools from iss.models import SimpleConvAutoEncoder from iss.clustering import", "'auto') # 
plt.show() #%% [markdown] # ## Zoom sur le", "## Chargement des images #%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs =", "= pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown] # ## ACP # Réduction", "model) #%% intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown] # ##", "for f in res_tmp[:100]] # img = Tools.display_mosaic(image_array, nrow =", "= [Tools.read_np_picture(f, target_size = (54, 96)) for f in res_tmp]", "# ## Résultats #%% [markdown] # ### Clusters intermediaires #%%", "#%% load_dotenv(find_dotenv()) cfg = Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\"))", "model_type = 'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%%", "clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%% clustering.compute_pca() #%% [markdown] #", "plt.figure(1, figsize=(12, 7)) # plt.imshow(img, aspect = 'auto') # plt.show()", "# ### Clusters finaux #%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c", "print(\"Cluster %s\" % (cl)) res_tmp = select_cluster(clustering, cl) print(len(res_tmp)) image_array", "[markdown] # ## Chargement de la config #%% load_dotenv(find_dotenv()) cfg", "0], clustering.pca_reduction[:, 1], c = clustering.final_labels) #%% [markdown] # ###", "id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for res in clustering.get_zip_results()", "clustering.save() #%% # clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown] #", "# ## Zoom sur le cluster 0 #%% res_tmp =", "res in clustering.get_zip_results() if res[2] == id_cluster] #%% from IPython.display", "plt.show() #%% [markdown] # ## Zoom sur le cluster 0", "la config #%% load_dotenv(find_dotenv()) 
cfg = Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode", "#%% from iss.tools import Config from iss.tools import Tools from", "clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown] # ## Visualisation des", "#%% [markdown] # ### Sauvegarde des modèles #%% clustering.save() #%%", "fig = plt.figure(1, figsize=(12, 7)) # plt.imshow(img, aspect = 'auto')", "= [1 if l == 1 else 0 for l", "1], c = clustering.final_labels) #%% [markdown] # ### Sauvegarde des", "load_dotenv(find_dotenv()) cfg = Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\")) #%%", "plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = col) #%% plt.scatter(clustering.pca_reduction[np.array(col) ==", "= Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown]", "### Sauvegarde des modèles #%% clustering.save() #%% # clustering =", "#%% intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown] # ## ACP", "plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.kmeans_labels) #%% [markdown] #", "l == 1 else 0 for l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:,", "(54, 96)) for f in res_tmp] #%% Tools.display_mosaic(image_array, nrow =", "intermediate_output) #%% clustering.compute_pca() #%% [markdown] # ## Kmeans # Premiers", "#%% [markdown] # ## Chargement du modèle #%% ## charger", "% (cl)) res_tmp = select_cluster(clustering, cl) print(len(res_tmp)) image_array = [Tools.read_np_picture(f,", "for f in res_tmp] #%% Tools.display_mosaic(image_array, nrow = 18) #%%", "for l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c =", "[markdown] ## Chargement des images #%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') 
generator_imgs", "#%% [markdown] # ## Kmeans # Premiers clusters #%% clustering.compute_kmeans()", "from dotenv import find_dotenv, load_dotenv import numpy as np #%%", "os.getenv(\"MODE\")) #%% [markdown] # ## Chargement du modèle #%% ##", "clusters #%% clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown] # ## Résultats #%%", "iss.clustering import AdvancedClustering from dotenv import find_dotenv, load_dotenv import numpy", "AdvancedClustering from dotenv import find_dotenv, load_dotenv import numpy as np", "#%% clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown] # ## Résultats #%% [markdown]", "# Seconds clusters #%% clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown] # ##", "= col) #%% plt.scatter(clustering.pca_reduction[np.array(col) == 1, 0], clustering.pca_reduction[np.array(col) == 1,", "select_cluster(clustering, 1) #%% print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54,", "import SimpleConvAutoEncoder from iss.clustering import ClassicalClustering from iss.clustering import AdvancedClustering", "fig = plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c", "finaux #%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.final_labels) #%%", "= os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\")) #%% [markdown] # ## Chargement", "for cl in range(0,19): print(\"Cluster %s\" % (cl)) res_tmp =", "des clusters #%% def select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] +", "## charger le modèle model_type = 'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab'", "SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ## Chargement des images #%% filenames =", "clusters #%% def select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] 
+ '.jpg')", "res_tmp] #%% Tools.display_mosaic(image_array, nrow = 18) #%% col = [1", "cluster 0 #%% res_tmp = select_cluster(clustering, 1) #%% print(len(res_tmp)) image_array", "#%% print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size = (54, 96)) for", "#%% def select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for", "Image #%% for cl in range(0,19): print(\"Cluster %s\" % (cl))", "# ## Chargement du modèle #%% ## charger le modèle", "batch = 496, nb_batch = 10) #%% pictures_id, pictures_preds =", "[markdown] # ## Kmeans # Premiers clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers()", "clustering.kmeans_labels) #%% [markdown] # ### Clusters finaux #%% plt.scatter(clustering.pca_reduction[:, 0],", "'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ##", "if l == 1 else 0 for l in clustering.kmeans_labels]", "Clusters finaux #%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.final_labels)", "Chargement du modèle #%% ## charger le modèle model_type =", "= Config(project_dir = os.getenv(\"PROJECT_DIR\"), mode = os.getenv(\"MODE\")) #%% [markdown] #", "#%% clustering.compute_pca() #%% [markdown] # ## Kmeans # Premiers clusters", "id_cluster] #%% from IPython.display import Image #%% for cl in", "= (54, 96)) for f in res_tmp[:100]] # img =", "#%% for cl in range(0,19): print(\"Cluster %s\" % (cl)) res_tmp", "[markdown] # # Clustering classique #%% [markdown] # ## import", "from iss.clustering import AdvancedClustering from dotenv import find_dotenv, load_dotenv import", "f in res_tmp] #%% Tools.display_mosaic(image_array, nrow = 18) #%% col", "target_size = (27, 48), batch = 496, nb_batch = 10)", "in res_tmp[:100]] # img = Tools.display_mosaic(image_array, nrow = 10) #", "c = col) #%% 
plt.scatter(clustering.pca_reduction[np.array(col) == 1, 0], clustering.pca_reduction[np.array(col) ==", "[markdown] # ## CAH # Seconds clusters #%% clustering.compute_cah() clustering.compute_cah_labels()", "as np #%% [markdown] # ## Chargement de la config", "[os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for res in clustering.get_zip_results() if res[2]", "clustering.final_labels) #%% [markdown] # ### Sauvegarde des modèles #%% clustering.save()", "if res[2] == id_cluster] #%% from IPython.display import Image #%%", "import AdvancedClustering from dotenv import find_dotenv, load_dotenv import numpy as", "+ '.jpg') for res in clustering.get_zip_results() if res[2] == id_cluster]", "[markdown] # ### Clusters finaux #%% plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1],", "plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.kmeans_labels)", "des modèles #%% clustering.save() #%% # clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load()", "find_dotenv, load_dotenv import numpy as np #%% [markdown] # ##", "## Chargement du modèle #%% ## charger le modèle model_type", "1 else 0 for l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:,", "# ## Kmeans # Premiers clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%%", "clustering.compute_cah_labels() #%% [markdown] # ## Résultats #%% [markdown] # ###", "res_tmp = select_cluster(clustering, cl) print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size =", "#%% [markdown] # ## Zoom sur le cluster 0 #%%", "# plt.show() #%% [markdown] # ## Zoom sur le cluster", "Réduction de la dimension #%% clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output)", "col) #%% plt.scatter(clustering.pca_reduction[np.array(col) == 1, 0], 
clustering.pca_reduction[np.array(col) == 1, 1])", "10) # fig = plt.figure(1, figsize=(12, 7)) # plt.imshow(img, aspect", "clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = col) #%% plt.scatter(clustering.pca_reduction[np.array(col)", "#%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size = (27,", "# ### Sauvegarde des modèles #%% clustering.save() #%% # clustering", "load_dotenv import numpy as np #%% [markdown] # ## Chargement", "#%% [markdown] # ### Clusters intermediaires #%% fig = plt.figure(1,", "#%% # clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown] # ##", "Tools from iss.models import SimpleConvAutoEncoder from iss.clustering import ClassicalClustering from", "#%% %load_ext autoreload %autoreload 2 os.chdir('/home/jovyan/work') #%% [markdown] # ##", "10) #%% pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output =", "clustering.pca_reduction[:, 1], c = col) #%% plt.scatter(clustering.pca_reduction[np.array(col) == 1, 0],", "in clustering.get_zip_results() if res[2] == id_cluster] #%% from IPython.display import", "res_tmp = select_cluster(clustering, 1) #%% print(len(res_tmp)) image_array = [Tools.read_np_picture(f, target_size", "#%% fig = plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1],", "2 os.chdir('/home/jovyan/work') #%% [markdown] # ## Import iss #%% from", "## Zoom sur le cluster 0 #%% res_tmp = select_cluster(clustering,", "pictures_preds.reshape((pictures_preds.shape[0], 3*6*16)) #%% [markdown] # ## ACP # Réduction de", "in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = col) #%%", "= [Tools.read_np_picture(f, target_size = (54, 96)) for f in 
res_tmp[:100]]", "clustering.get_zip_results() if res[2] == id_cluster] #%% from IPython.display import Image", "# Premiers clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown] # ##", "#%% [markdown] # ## CAH # Seconds clusters #%% clustering.compute_cah()", "## Chargement de la config #%% load_dotenv(find_dotenv()) cfg = Config(project_dir", "= 'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ## Chargement des", "# clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown] # ## Visualisation", "Seconds clusters #%% clustering.compute_cah() clustering.compute_cah_labels() #%% [markdown] # ## Résultats", "#%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown] # ## CAH # Seconds", "## Import iss #%% from iss.tools import Config from iss.tools", "#%% [markdown] ## Chargement des images #%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/')", "import Config from iss.tools import Tools from iss.models import SimpleConvAutoEncoder", "iss.models import SimpleConvAutoEncoder from iss.clustering import ClassicalClustering from iss.clustering import", "#%% ## charger le modèle model_type = 'simple_conv' cfg.get('models')[model_type]['model_name'] =", "= ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%% clustering.compute_pca() #%% [markdown] # ##", "# plt.imshow(img, aspect = 'auto') # plt.show() #%% [markdown] #", "= plt.figure(1, figsize=(12, 7)) # plt.imshow(img, aspect = 'auto') #", "aspect = 'auto') # plt.show() #%% [markdown] # ## Zoom", "0], clustering.pca_reduction[:, 1], c = col) #%% plt.scatter(clustering.pca_reduction[np.array(col) == 1,", "de la dimension #%% clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%%", "clustering.compute_kmeans_centers() #%% [markdown] # ## CAH # 
Seconds clusters #%%", "res_tmp[:100]] # img = Tools.display_mosaic(image_array, nrow = 10) # fig", "[markdown] # ### Clusters intermediaires #%% fig = plt.figure(1, figsize=(12,", "le modèle model_type = 'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab' model =", "def select_cluster(clustering, id_cluster): return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for res", "#%% col = [1 if l == 1 else 0", "nb_batch = 10) #%% pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model) #%%", "7)) plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.kmeans_labels) #%% [markdown]", "f in res_tmp[:100]] # img = Tools.display_mosaic(image_array, nrow = 10)", "# ## Import iss #%% from iss.tools import Config from", "c = clustering.final_labels) #%% [markdown] # ### Sauvegarde des modèles", "Tools.display_mosaic(image_array, nrow = 10) # fig = plt.figure(1, figsize=(12, 7))", "images #%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size =", "intermediaires #%% fig = plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:,", "nrow = 10) # fig = plt.figure(1, figsize=(12, 7)) #", "[markdown] # ## ACP # Réduction de la dimension #%%", "= plt.figure(1, figsize=(12, 7)) plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c =", "Chargement des images #%% filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = Tools.generator_np_picture_from_filenames(filenames,", "import classique import os #%% %load_ext autoreload %autoreload 2 os.chdir('/home/jovyan/work')", "pictures_id, intermediate_output) #%% clustering.compute_pca() #%% [markdown] # ## Kmeans #", "1], c = col) #%% plt.scatter(clustering.pca_reduction[np.array(col) == 
1, 0], clustering.pca_reduction[np.array(col)", "18) #%% col = [1 if l == 1 else", "= os.getenv(\"MODE\")) #%% [markdown] # ## Chargement du modèle #%%", "Premiers clusters #%% clustering.compute_kmeans() clustering.compute_kmeans_centers() #%% [markdown] # ## CAH", "modèles #%% clustering.save() #%% # clustering = ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%%", "7)) # plt.imshow(img, aspect = 'auto') # plt.show() #%% [markdown]", "## import classique import os #%% %load_ext autoreload %autoreload 2", "return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for res in clustering.get_zip_results() if", "#%% [markdown] # # Clustering classique #%% [markdown] # ##", "#%% Tools.display_mosaic(image_array, nrow = 18) #%% col = [1 if", "import ClassicalClustering from iss.clustering import AdvancedClustering from dotenv import find_dotenv,", "Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/') generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48), batch =", "ClassicalClustering(cfg.get('clustering')['classical']) clustering.load() #%% [markdown] # ## Visualisation des clusters #%%", "import Image #%% for cl in range(0,19): print(\"Cluster %s\" %", "else 0 for l in clustering.kmeans_labels] plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1],", "iss.tools import Config from iss.tools import Tools from iss.models import", "dotenv import find_dotenv, load_dotenv import numpy as np #%% [markdown]", "cfg.get('models')[model_type]['model_name'] = 'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type]) #%% [markdown] ## Chargement", "#%% [markdown] # ## import classique import os #%% %load_ext", "%autoreload 2 os.chdir('/home/jovyan/work') #%% [markdown] # ## Import iss #%%", "# ## CAH # Seconds clusters #%% clustering.compute_cah() clustering.compute_cah_labels() #%%", "[markdown] # ## Zoom sur le cluster 0 #%% 
res_tmp", "iss.clustering import ClassicalClustering from iss.clustering import AdvancedClustering from dotenv import", "la dimension #%% clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output) #%% clustering.compute_pca()", "'.jpg') for res in clustering.get_zip_results() if res[2] == id_cluster] #%%", "# # Clustering classique #%% [markdown] # ## import classique", "Chargement de la config #%% load_dotenv(find_dotenv()) cfg = Config(project_dir =", "%s\" % (cl)) res_tmp = select_cluster(clustering, cl) print(len(res_tmp)) image_array =", "modèle model_type = 'simple_conv' cfg.get('models')[model_type]['model_name'] = 'model_colab' model = SimpleConvAutoEncoder(cfg.get('models')[model_type])", "[markdown] # ## Import iss #%% from iss.tools import Config", "plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.final_labels) #%% [markdown] #", "le cluster 0 #%% res_tmp = select_cluster(clustering, 1) #%% print(len(res_tmp))", "#%% [markdown] # ## ACP # Réduction de la dimension", "generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48), batch = 496,", "nrow = 18) #%% col = [1 if l ==", "numpy as np #%% [markdown] # ## Chargement de la", "496, nb_batch = 10) #%% pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model)", "= 10) #%% pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model) #%% intermediate_output" ]
[ "handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file)) # set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) #", "'%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') format_error = \\ logging.Formatter( '%(asctime)s %(levelname)s '", "= logging.FileHandler(os.path.join(this_folder, error_log_file)) # set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create", "__author__ = \"<NAME>\" __email__ = \"<EMAIL>\" import errno import logging", "%(module)s.%(funcName)s linenr.%(lineno)s ] ' '[ thread: %(threadName)s ] %(message)s') handler_info.setFormatter(format_info)", "location this_folder = os.path.dirname(os.path.abspath(__file__)) # define log folder related to", "'/info.log' error_log_file = log_folder + '/error.log' # check if exists", "= log_folder + '/info.log' error_log_file = log_folder + '/error.log' #", "datefmt='%Y-%m-%d %H:%M:%S') format_error = \\ logging.Formatter( '%(asctime)s %(levelname)s ' '[", "%H:%M:%S') format_error = \\ logging.Formatter( '%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s", "import os from logging.config import fileConfig # runtime location this_folder", "log_folder + '/error.log' # check if exists or create log", "'[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') format_error =", "create log folder try: os.makedirs(log_folder, exist_ok=True) # Python>3.2 except TypeError:", "from logging.config import fileConfig # runtime location this_folder = os.path.dirname(os.path.abspath(__file__))", "# set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create formatters and add", "and add to handlers format_info = \\ logging.Formatter('%(asctime)s %(levelname)s '", "logger = logging.getLogger(name) # add handler logger.addHandler(handler_info) logger.addHandler(handler_error) return logger", "add to handlers format_info = \\ logging.Formatter('%(asctime)s 
%(levelname)s ' '[", "levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create formatters and add to handlers", "ini_file) fileConfig(config_file, disable_existing_loggers=True) # create handlers handler_info = logging.FileHandler(os.path.join(this_folder, info_log_file))", "' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '[ thread: %(threadName)s ]", "def get_logger(name: str = __name__): logger = logging.getLogger(name) # add", "= logging.getLogger(name) # add handler logger.addHandler(handler_info) logger.addHandler(handler_error) return logger if", "files ini_file = 'debug.ini' info_log_file = log_folder + '/info.log' error_log_file", "log_folder + '/info.log' error_log_file = log_folder + '/error.log' # check", "+ '/info.log' error_log_file = log_folder + '/error.log' # check if", "] ' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') format_error = \\ logging.Formatter( '%(asctime)s", "folder try: os.makedirs(log_folder, exist_ok=True) # Python>3.2 except TypeError: try: os.makedirs(log_folder)", "' '[ thread: %(threadName)s ] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name:", "if exc.errno == errno.EEXIST and os.path.isdir(log_folder): pass else: raise #", "handler_error.setFormatter(format_error) def get_logger(name: str = __name__): logger = logging.getLogger(name) #", "----------------------------------------------------------- __author__ = \"<NAME>\" __email__ = \"<EMAIL>\" import errno import", "= os.path.dirname(os.path.abspath(__file__)) # define log folder related to location log_folder", "linenr.%(lineno)s ] ' '[ thread: %(threadName)s ] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error)", "'debug.ini' info_log_file = log_folder + '/info.log' error_log_file = log_folder +", "'/error.log' # check if exists or create log folder try:", "linenr.%(lineno)s ] ' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') 
format_error = \\ logging.Formatter(", "utf-8 -*- # ----------------------------------------------------------- # created 02.02.2021, tkaulke # <NAME>,", "= \\ logging.Formatter( '%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ]", "config_file = os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True) # create handlers handler_info", "# runtime location this_folder = os.path.dirname(os.path.abspath(__file__)) # define log folder", "'%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '[ thread:", "' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') format_error", "-*- coding: utf-8 -*- # ----------------------------------------------------------- # created 02.02.2021, tkaulke", "\"<EMAIL>\" import errno import logging import os from logging.config import", "= log_folder + '/error.log' # check if exists or create", "= logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file)) # set levels", "# -*- coding: utf-8 -*- # ----------------------------------------------------------- # created 02.02.2021,", "info_log_file = log_folder + '/info.log' error_log_file = log_folder + '/error.log'", "else: raise # setup configuration config_file = os.path.join(this_folder, ini_file) fileConfig(config_file,", "ini and log files ini_file = 'debug.ini' info_log_file = log_folder", "check if exists or create log folder try: os.makedirs(log_folder, exist_ok=True)", "%(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S')", "created 02.02.2021, tkaulke # <NAME>, <EMAIL> # https://github.com/kaulketh # -----------------------------------------------------------", "os.makedirs(log_folder) except OSError as exc: # Python >2.5 if exc.errno", "create handlers handler_info = logging.FileHandler(os.path.join(this_folder, 
info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file))", "# create formatters and add to handlers format_info = \\", "<NAME>, <EMAIL> # https://github.com/kaulketh # ----------------------------------------------------------- __author__ = \"<NAME>\" __email__", "# ----------------------------------------------------------- # created 02.02.2021, tkaulke # <NAME>, <EMAIL> #", "exc.errno == errno.EEXIST and os.path.isdir(log_folder): pass else: raise # setup", ">2.5 if exc.errno == errno.EEXIST and os.path.isdir(log_folder): pass else: raise", "except TypeError: try: os.makedirs(log_folder) except OSError as exc: # Python", "ini_file = 'debug.ini' info_log_file = log_folder + '/info.log' error_log_file =", "error_log_file = log_folder + '/error.log' # check if exists or", "https://github.com/kaulketh # ----------------------------------------------------------- __author__ = \"<NAME>\" __email__ = \"<EMAIL>\" import", "define ini and log files ini_file = 'debug.ini' info_log_file =", "= \\ logging.Formatter('%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] '", "location log_folder = os.path.join(this_folder, '../logs') # define ini and log", "fileConfig # runtime location this_folder = os.path.dirname(os.path.abspath(__file__)) # define log", "os.path.join(this_folder, '../logs') # define ini and log files ini_file =", "configuration config_file = os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True) # create handlers", "= \"<EMAIL>\" import errno import logging import os from logging.config", "add handler logger.addHandler(handler_info) logger.addHandler(handler_error) return logger if __name__ == '__main__':", "this_folder = os.path.dirname(os.path.abspath(__file__)) # define log folder related to location", "handlers format_info = \\ logging.Formatter('%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s", "logging.config import fileConfig # 
runtime location this_folder = os.path.dirname(os.path.abspath(__file__)) #", "logging.FileHandler(os.path.join(this_folder, error_log_file)) # set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create formatters", "log folder related to location log_folder = os.path.join(this_folder, '../logs') #", "%(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name: str = __name__): logger =", "handler logger.addHandler(handler_info) logger.addHandler(handler_error) return logger if __name__ == '__main__': pass", "# add handler logger.addHandler(handler_info) logger.addHandler(handler_error) return logger if __name__ ==", "# create handlers handler_info = logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder,", "as exc: # Python >2.5 if exc.errno == errno.EEXIST and", "= 'debug.ini' info_log_file = log_folder + '/info.log' error_log_file = log_folder", "] ' '[ thread: %(threadName)s ] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def", "os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True) # create handlers handler_info = logging.FileHandler(os.path.join(this_folder,", "%(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '[ thread: %(threadName)s", "format_info = \\ logging.Formatter('%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ]", "import fileConfig # runtime location this_folder = os.path.dirname(os.path.abspath(__file__)) # define", "coding: utf-8 -*- # ----------------------------------------------------------- # created 02.02.2021, tkaulke #", "# define ini and log files ini_file = 'debug.ini' info_log_file", "folder related to location log_folder = os.path.join(this_folder, '../logs') # define", "exist_ok=True) # Python>3.2 except TypeError: try: os.makedirs(log_folder) except OSError 
as", "== errno.EEXIST and os.path.isdir(log_folder): pass else: raise # setup configuration", "%(threadName)s ] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name: str = __name__):", "import errno import logging import os from logging.config import fileConfig", "exists or create log folder try: os.makedirs(log_folder, exist_ok=True) # Python>3.2", "import logging import os from logging.config import fileConfig # runtime", "exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(log_folder):", "os.path.dirname(os.path.abspath(__file__)) # define log folder related to location log_folder =", "and os.path.isdir(log_folder): pass else: raise # setup configuration config_file =", "' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') format_error = \\ logging.Formatter( '%(asctime)s %(levelname)s", "try: os.makedirs(log_folder) except OSError as exc: # Python >2.5 if", "and log files ini_file = 'debug.ini' info_log_file = log_folder +", "try: os.makedirs(log_folder, exist_ok=True) # Python>3.2 except TypeError: try: os.makedirs(log_folder) except", "%(module)s.%(funcName)s linenr.%(lineno)s ] ' '%(message).180s', datefmt='%Y-%m-%d %H:%M:%S') format_error = \\", "\\ logging.Formatter( '%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] '", "__name__): logger = logging.getLogger(name) # add handler logger.addHandler(handler_info) logger.addHandler(handler_error) return", "# Python>3.2 except TypeError: try: os.makedirs(log_folder) except OSError as exc:", "runtime location this_folder = os.path.dirname(os.path.abspath(__file__)) # define log folder related", "TypeError: try: os.makedirs(log_folder) except OSError as exc: # Python >2.5", "raise # setup configuration config_file = os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True)", "fileConfig(config_file, disable_existing_loggers=True) # create handlers handler_info = 
logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error", "log_folder = os.path.join(this_folder, '../logs') # define ini and log files", "logging.Formatter('%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '%(message).180s', datefmt='%Y-%m-%d", "if exists or create log folder try: os.makedirs(log_folder, exist_ok=True) #", "02.02.2021, tkaulke # <NAME>, <EMAIL> # https://github.com/kaulketh # ----------------------------------------------------------- __author__", "Python>3.2 except TypeError: try: os.makedirs(log_folder) except OSError as exc: #", "setup configuration config_file = os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True) # create", "errno.EEXIST and os.path.isdir(log_folder): pass else: raise # setup configuration config_file", "# Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(log_folder): pass", "create formatters and add to handlers format_info = \\ logging.Formatter('%(asctime)s", "= os.path.join(this_folder, '../logs') # define ini and log files ini_file", "__email__ = \"<EMAIL>\" import errno import logging import os from", "logging.getLogger(name) # add handler logger.addHandler(handler_info) logger.addHandler(handler_error) return logger if __name__", "# setup configuration config_file = os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True) #", "to location log_folder = os.path.join(this_folder, '../logs') # define ini and", "= \"<NAME>\" __email__ = \"<EMAIL>\" import errno import logging import", "'../logs') # define ini and log files ini_file = 'debug.ini'", "format_error = \\ logging.Formatter( '%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s", "= __name__): logger = logging.getLogger(name) # add handler logger.addHandler(handler_info) logger.addHandler(handler_error)", "#!/usr/bin/python3 # -*- coding: utf-8 -*- # ----------------------------------------------------------- # 
created", "<EMAIL> # https://github.com/kaulketh # ----------------------------------------------------------- __author__ = \"<NAME>\" __email__ =", "tkaulke # <NAME>, <EMAIL> # https://github.com/kaulketh # ----------------------------------------------------------- __author__ =", "# ----------------------------------------------------------- __author__ = \"<NAME>\" __email__ = \"<EMAIL>\" import errno", "\"<NAME>\" __email__ = \"<EMAIL>\" import errno import logging import os", "# https://github.com/kaulketh # ----------------------------------------------------------- __author__ = \"<NAME>\" __email__ = \"<EMAIL>\"", "----------------------------------------------------------- # created 02.02.2021, tkaulke # <NAME>, <EMAIL> # https://github.com/kaulketh", "'[ thread: %(threadName)s ] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name: str", "to handlers format_info = \\ logging.Formatter('%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s", "# check if exists or create log folder try: os.makedirs(log_folder,", "-*- # ----------------------------------------------------------- # created 02.02.2021, tkaulke # <NAME>, <EMAIL>", "# define log folder related to location log_folder = os.path.join(this_folder,", "os.makedirs(log_folder, exist_ok=True) # Python>3.2 except TypeError: try: os.makedirs(log_folder) except OSError", "errno import logging import os from logging.config import fileConfig #", "# <NAME>, <EMAIL> # https://github.com/kaulketh # ----------------------------------------------------------- __author__ = \"<NAME>\"", "= os.path.join(this_folder, ini_file) fileConfig(config_file, disable_existing_loggers=True) # create handlers handler_info =", "pass else: raise # setup configuration config_file = os.path.join(this_folder, ini_file)", "handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create formatters and add to handlers format_info", "log files ini_file = 
'debug.ini' info_log_file = log_folder + '/info.log'", "define log folder related to location log_folder = os.path.join(this_folder, '../logs')", "info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file)) # set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR)", "+ '/error.log' # check if exists or create log folder", "logging import os from logging.config import fileConfig # runtime location", "thread: %(threadName)s ] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name: str =", "os from logging.config import fileConfig # runtime location this_folder =", "formatters and add to handlers format_info = \\ logging.Formatter('%(asctime)s %(levelname)s", "] %(message)s') handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name: str = __name__): logger", "logging.Formatter( '%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '[", "logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file)) # set levels handler_info.setLevel(logging.INFO)", "set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create formatters and add to", "except OSError as exc: # Python >2.5 if exc.errno ==", "Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(log_folder): pass else:", "related to location log_folder = os.path.join(this_folder, '../logs') # define ini", "disable_existing_loggers=True) # create handlers handler_info = logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error =", "OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST", "or create log folder try: os.makedirs(log_folder, exist_ok=True) # Python>3.2 except", "os.path.isdir(log_folder): pass else: raise # setup configuration config_file = os.path.join(this_folder,", "str = __name__): 
logger = logging.getLogger(name) # add handler logger.addHandler(handler_info)", "error_log_file)) # set levels handler_info.setLevel(logging.INFO) handler_error.setLevel(logging.ERROR) # create formatters and", "'[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '[ thread: %(threadName)s ] %(message)s')", "get_logger(name: str = __name__): logger = logging.getLogger(name) # add handler", "# created 02.02.2021, tkaulke # <NAME>, <EMAIL> # https://github.com/kaulketh #", "log folder try: os.makedirs(log_folder, exist_ok=True) # Python>3.2 except TypeError: try:", "handler_info = logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file)) # set", "handlers handler_info = logging.FileHandler(os.path.join(this_folder, info_log_file)) handler_error = logging.FileHandler(os.path.join(this_folder, error_log_file)) #", "\\ logging.Formatter('%(asctime)s %(levelname)s ' '[ %(module)s.%(funcName)s linenr.%(lineno)s ] ' '%(message).180s',", "handler_error.setLevel(logging.ERROR) # create formatters and add to handlers format_info =", "handler_info.setFormatter(format_info) handler_error.setFormatter(format_error) def get_logger(name: str = __name__): logger = logging.getLogger(name)" ]
[ "True, use_propensity = True, test_size = 0) uplift_model.best_params_net y_test, x_test,", "rmse_tolerance = .05 num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1],", "t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000 y, x, t =", "= np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0]) assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert", "= param_grid, optimized_loss = True) oos_re = uplift_model.get_random_erupts() uplift_model_propensity =", "= 1000 y, x, t = get_simple_uplift_data(num_obs) t = t.reshape(-1,", "get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512], alpha", "x_test, use_propensity_score_cutoff = True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat", "unique_treatments.shape[0], y.shape[1])) for q in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0]", "== 1)[0]] == 1) == x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff =", "axis=1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000])", "= get_simple_uplift_data(num_obs) t = t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1],", "( np.unique(t, axis=0).shape[0], num_obs * .7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape ==", "[0,1] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean() correct_tmts_2 =", "copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid", "get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1 from mr_uplift.mr_uplift import MRUplift, get_t_data from mr_uplift.keras_model_functionality", "correct_tmts_3 = np.array([x in [0,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups ==", 
"test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs) t", "MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss", "uplift_model.get_erupt_curves(x = x, y = y, t = t) assert", "== (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_model_optim_mean_outputs(self): true_ATE =", "= MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1) x_1", "uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis =", "test_model_optim_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance = .05", "y.shape[1]) assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert uplift_model.get_erupt_curves() assert", "uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice =", "= MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid,", "missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape == (num_obs,", "1 assert np.mean(test_2 == test_2_values) == 1 def test_model_mean_outputs(self): true_ATE", "activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y, x, t = get_simple_uplift_data(num_obs) uplift_model", "param_grid = param_grid, n_jobs=1, optimized_loss = True, use_propensity = True,", "1 def test_model_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance", "= get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1)", "1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1 = np.array([x in", "batch_size=[100], alpha = [.5], 
copy_several_times = [2]) uplift_model_propensity = MRUplift()", "mr_uplift.mr_uplift import MRUplift, get_t_data from mr_uplift.keras_model_functionality import prepare_data_optimized_loss import sys", "= get_simple_uplift_data(num_obs) t = t.reshape(len(t),1) unique_treatments = np.unique(t, axis =", "np.zeros(num_obs_1).reshape(-1, 1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert", "10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise,", "uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance def test_model_pred_oos_shapes(self):", "y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = False)", "import numpy as np import pytest from mr_uplift.dataset.data_simulation import get_no_noise_data,", "= optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1", "param_grid, optimized_loss = True, use_propensity = True) oos_re_propensity = uplift_model_propensity.get_random_erupts()", "y, t = t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000 y,", "optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = False) optim_treatments_cuttoff =", "= [.5], copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x, y,", "1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) param_grid = dict(num_nodes=[8], dropout=[.1],", "10000 TOLERANCE = .98 y, x, t, rule_assignment = get_observational_uplift_data_1(num_obs)", "uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer = True)", "x, t = get_simple_uplift_data(num_obs) t = t.reshape(len(t),1) unique_treatments = 
np.unique(t,", "dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100], alpha = [.5], copy_several_times", "np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,masks, unique_treatments)", "assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_prepare_data_optimized_loss_two_col_tmt(self):", "= dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y, x, t", "= dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000]) uplift_model =", "= get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512],", "as np import pytest from mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1", "= 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x = x_test,", "y, t = t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000 y,", "= dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise", "n_jobs=1, optimized_loss = True, use_propensity = True, test_size = 0)", "uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) < rmse_tolerance def", "== 3)[0]] ]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] == 1)", "= t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000 y, x, t", "optimized_loss = True, use_propensity = True) assert uplift_model.predict_ice().shape == (", 
"experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff", "missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape == (num_obs,", "np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3", "True, use_propensity = True) oos_re_propensity = uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] >", "in [1,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean() correct_tmts_3", "dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y, x, t = get_simple_uplift_data(num_obs)", "get_simple_uplift_data(num_obs) t = t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[", "get_simple_uplift_data(num_obs) t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1)", "activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model", "= get_simple_uplift_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x, y, t.reshape(-1, 1), n_jobs=1,", ".05 num_obs = 10000 y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model", "y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid) varimp = uplift_model.permutation_varimp(objective_weights", "epochs=[30], batch_size=[100], alpha = [.5], copy_several_times = [2]) y_no_noise, x_no_noise,", "[1,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean() correct_tmts_3 =", "3)[0]] ]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] == 1) ==", "t = get_simple_uplift_data(num_obs) 
t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1,", "num_obs).reshape(-1, 1)], axis=1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1],", "= np.ones(num_obs).reshape(num_obs,1) x, utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,", "= param_grid, optimized_loss = True, use_propensity = True) oos_re_propensity =", "assert uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000 y, x, t", "1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000])", "test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs) t", "n_jobs=1, param_grid = param_grid) varimp = uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid", "= x, y = y, t = t) assert uplift_model_named.get_erupt_curves()", "y.copy() y_1 = pd.DataFrame(y_1) y_1.columns = ['var_'+str(x) for x in", "assert np.mean(test_1 == test_1_values) == 1 assert np.mean(test_2 == test_2_values)", "n_jobs=1) x_1 = x.copy() x_1 = pd.DataFrame(x_1) x_1.columns = ['var_'+str(x)", "[.5], copy_several_times = [2]) uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1,", "test_varimp(self): num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2],", "= param_grid, n_jobs=1) x_1 = x.copy() x_1 = pd.DataFrame(x_1) x_1.columns", "= param_grid, n_jobs=1) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs", "oos_re_propensity = uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] > 0 assert oos_re_propensity['mean'].iloc[0] >", "t = t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'],", "= np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, 
num_obs).reshape(-1, 1)], axis=1) param_grid =", "np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) < rmse_tolerance def test_model_get_random_erupts(self): true_ATE = np.array([[0,", "y, x, t = get_simple_uplift_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x, y,", "= uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) < rmse_tolerance", "uplift_model = MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid = param_grid, n_jobs=1,", "activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000]) uplift_model = MRUplift() uplift_model.fit(x, y,", "get_simple_uplift_data(num_obs) t = t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'],", "== -999)).mean() ==1 ) def test_model_optim_mean_outputs(self): true_ATE = np.array([[0, 0],", "in range(y.shape[1])] uplift_model_named = MRUplift() uplift_model_named.fit(x_1, y_1, t, param_grid =", "1000 y, x, t = get_simple_uplift_data(num_obs) t = np.concatenate([t.reshape(-1, 1),", "= True) oos_re_propensity = uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] > 0 assert", "= ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] == 1) == x_test[np.where(experiment_groups == 1)[0],0]).mean()", "y = y, t = t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs =", "x_1 = pd.DataFrame(x_1) x_1.columns = ['var_'+str(x) for x in range(x.shape[1])]", "= param_grid, optimized_loss = True, use_propensity = True) varimp_propensity =", "]).mean() correct_tmts_2 = np.array([x in [1,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups", "masks, weights = prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1]))", "correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] == 1) == x_test[np.where(experiment_groups ==", "masks, weights = 
prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape", "MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1) x_1 =", "optimized_loss = True, use_propensity = True, test_size = 0) uplift_model.best_params_net", "np.array([x in [0,1] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean()", ") == x_test[:,0]) assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert", "use_propensity = True) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs", "num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30],", "[.5], copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x, y, t,", "uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30],", "copy_several_times = [2]) uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1, 1),", "test_size = 0) uplift_model.best_params_net y_test, x_test, t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs)", "= param_grid, n_jobs=1, optimized_loss = True, use_propensity = True, test_size", "= ['var_'+str(x) for x in range(x.shape[1])] y_1 = y.copy() y_1", "tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1)", "= [.9999,.99], copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1),", "np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance def test_model_pred_oos_shapes(self): num_obs = 1000 y,", "= MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1, optimized_loss", "pd.DataFrame(x_1) x_1.columns = ['var_'+str(x) for x in 
range(x.shape[1])] y_1 =", "= .05 num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'],", "= y.copy() y_1 = pd.DataFrame(y_1) y_1.columns = ['var_'+str(x) for x", "= np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) unique_treatments =", "t = get_simple_uplift_data(num_obs) t = t.reshape(-1, 1) param_grid = dict(num_nodes=[8],", "= True, use_propensity = True) oos_re_propensity = uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0]", "t, rule_assignment = get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1],", "varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs = 10000 TOLERANCE = .98 y,", "== test_2_values) == 1 def test_model_mean_outputs(self): true_ATE = np.array([[0, 0],", "def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs)", "test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs) t", "= get_t_data(0, num_obs_1) test_2 = get_t_data(np.array([0, 1]), num_obs_2) test_1_values =", "t.reshape(len(t),1) unique_treatments = np.unique(t, axis = 0) masks = np.ones(num_obs).reshape(num_obs,1)", "uplift_model.get_erupt_curves(x = x, y = y, t = t) def", "MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid = param_grid, n_jobs=1, optimized_loss =", "y_1 = y.copy() y_1 = pd.DataFrame(y_1) y_1.columns = ['var_'+str(x) for", "True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis =", "uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid =", "assert oos_re_propensity['mean'].iloc[0] > 0 def test_varimp(self): num_obs = 10000 
param_grid", "== ( np.unique(t, axis=0).shape[0], num_obs * .7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape", "np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert np.mean(test_1 == test_1_values) ==", "n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) <", "utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape ==", "def test_model_optim_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance =", "def test_varimp(self): num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'],", "1)], axis=1) unique_treatments = np.unique(t, axis = 0) masks =", "activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512], alpha = [.9999,.99], copy_several_times = [1])", "y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = True)", "varimp = uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'],", "tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = False) oos_ice", "param_grid, n_jobs=1) x_1 = x.copy() x_1 = pd.DataFrame(x_1) x_1.columns =", "def test_model_get_random_erupts(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance =", "t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) param_grid", "range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def", "(missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_model_optim_mean_outputs(self): true_ATE = np.array([[0,", "[0] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean() correct_tmts_experiment_groups_1 =", 
"missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape ==", "True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) < rmse_tolerance def test_model_get_random_erupts(self): true_ATE", "= True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis", "for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups", "correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE assert np.array_equal(optim_treatments_cuttoff_cat,optim_treatments_no_cuttoff_cat) is", "correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE assert", "param_grid = param_grid, optimized_loss = False) oos_ice = uplift_model.predict_ice(response_transformer =", "np.mean(test_2 == test_2_values) == 1 def test_model_mean_outputs(self): true_ATE = np.array([[0,", "0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff", "assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE assert np.array_equal(optim_treatments_cuttoff_cat,optim_treatments_no_cuttoff_cat) is False assert correct_tmts_no_cutoff>TOLERANCE", "= np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,masks,", "use_propensity = True) varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert 
varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1]", "= MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer", "param_grid = param_grid, optimized_loss = True, use_propensity = True) varimp_propensity", "= get_simple_uplift_data(num_obs) t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)],", "np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha", "1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) unique_treatments = np.unique(t, axis", "in [0] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean() correct_tmts_experiment_groups_1", "0) uplift_model.best_params_net y_test, x_test, t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups =", "* .7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert", "uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x =", "epochs=[1], batch_size=[1000]) uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid =", "uplift_model_propensity.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss =", "batch_size=[512], alpha = [.9999,.99], copy_several_times = [1]) uplift_model = MRUplift()", "x_test, t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] =", "batch_size=[100], alpha = [.5], copy_several_times = [1]) uplift_model = MRUplift()", "= x_test, use_propensity_score_cutoff = True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis = 1)", 
"((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] == 1) == x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff", "axis = 0) masks = np.ones(num_obs).reshape(num_obs,1) x, utility_weights, missing_utility, missing_y_mat,", "param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y, x,", "import prepare_data_optimized_loss import sys import pandas as pd class TestMRUplift(object):", "MRUplift, get_t_data from mr_uplift.keras_model_functionality import prepare_data_optimized_loss import sys import pandas", "MRUplift() uplift_model.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid) varimp", "rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]]", "n_jobs=1, param_grid = param_grid, optimized_loss = True, use_propensity = True)", "= np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert np.mean(test_1 == test_1_values)", ".5, num_obs).reshape(-1, 1)], axis=1) unique_treatments = np.unique(t, axis = 0)", "= 1000 y, x, t = get_simple_uplift_data(num_obs) t = np.concatenate([t.reshape(-1,", "num_obs * .7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs, y.shape[1])", "x_1 = x.copy() x_1 = pd.DataFrame(x_1) x_1.columns = ['var_'+str(x) for", "= x, y = y, t = t) def test_prepare_data_optimized_loss_one_col_tmt(self):", "= 3 test_1 = get_t_data(0, num_obs_1) test_2 = get_t_data(np.array([0, 1]),", "x_test[:,0]) assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert", "= MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1) assert", "[1, .5]]) rmse_tolerance = .05 num_obs = 10000 param_grid =", "y, t, param_grid = 
param_grid, n_jobs=1) x_1 = x.copy() x_1", "activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100], alpha = [.5], copy_several_times = [1])", "y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = True,", "assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1])) for q in range(unique_treatments.shape[0]): assert(", "num_layers=[1], epochs=[1], batch_size=[1000]) uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid", "prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs,", "assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1])) for", "= [1]) uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid =", "oos_re_propensity['mean'].iloc[0] > 0 def test_varimp(self): num_obs = 10000 param_grid =", "= x, y = y, t = t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self):", "from mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1 from mr_uplift.mr_uplift import MRUplift,", "num_obs_1) test_2 = get_t_data(np.array([0, 1]), num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1, 1)", "axis=1) unique_treatments = np.unique(t, axis = 0) masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments))", "unique_treatments = np.unique(t, axis = 0) masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x,", "assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance def test_model_pred_oos_shapes(self): num_obs = 1000", "1), n_jobs=1, param_grid = param_grid) varimp = uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1))", "optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = True) 
optim_treatments_cuttoff_cat =", "numpy as np import pytest from mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data,", "t = t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000 y, x,", "= t) assert uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000 y,", "= dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100], alpha = [.5],", "y[:,0].reshape(-1,1), t, param_grid = param_grid, n_jobs=1, optimized_loss = True, use_propensity", "np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[", "get_simple_uplift_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid", "np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) param_grid = dict(num_nodes=[8],", "x, t = get_simple_uplift_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x, y, t.reshape(-1,", "== (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs =", "1)[0]] == 1) == x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1", "def test_model_propensity(self): num_obs = 10000 TOLERANCE = .98 y, x,", "t = t) assert uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000", "uplift_model_named.fit(x_1, y_1, t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape ==", ".98 y, x, t, rule_assignment = get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8],", "x, t, rule_assignment = get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'],", "True) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs * .7,", 
"test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert np.mean(test_1 ==", "y, x, t = get_simple_uplift_data(num_obs) t = t.reshape(-1, 1) param_grid", "dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000]) uplift_model = MRUplift()", "0) masks = np.ones(num_obs).reshape(num_obs,1) x, utility_weights, missing_utility, missing_y_mat, masks, weights", "]).mean() correct_tmts_3 = np.array([x in [0,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups", "def test_get_t_data(self): num_obs_1 = 10 num_obs_2 = 3 test_1 =", "t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) unique_treatments", "2)[0]] ]).mean() correct_tmts_4 = np.array([x in [0] for x in", "= get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] =", "tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1),", "1), np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert np.mean(test_1 == test_1_values) == 1", "x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean() correct_tmts_4 = np.array([x in", "y, x, t = get_simple_uplift_data(num_obs) t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1,", "pytest from mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1 from mr_uplift.mr_uplift import", "np import pytest from mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1 from", "for x in range(x.shape[1])] y_1 = y.copy() y_1 = pd.DataFrame(y_1)", "test_model_get_random_erupts(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance = .05", "correct_tmts_4 = np.array([x in [0] for 
x in optim_treatments_cuttoff_cat[np.where(experiment_groups ==", "get_t_data(0, num_obs_1) test_2 = get_t_data(np.array([0, 1]), num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1,", "x, t = get_simple_uplift_data(num_obs) t = t.reshape(-1, 1) param_grid =", "dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha = [.5], copy_several_times =", "= 3 optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = False)", "axis=0).shape[0], num_obs * .7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs,", "assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE", "= 10000 TOLERANCE = .98 y, x, t, rule_assignment =", "param_grid = param_grid, n_jobs=1, optimized_loss = True, use_propensity = True)", "batch_size=[100], alpha = [.5], copy_several_times = [2]) y_no_noise, x_no_noise, tmt_no_noise", "x, y = y, t = t) assert uplift_model_named.get_erupt_curves() def", "= np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs =", "test_2_values) == 1 def test_model_mean_outputs(self): true_ATE = np.array([[0, 0], [1,", "uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = False) optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x =", "((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_model_optim_mean_outputs(self): true_ATE", "param_grid, n_jobs=1, optimized_loss = True, use_propensity = True, test_size =", "get_simple_uplift_data, get_observational_uplift_data_1 from mr_uplift.mr_uplift import MRUplift, 
get_t_data from mr_uplift.keras_model_functionality import", "= uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] > 0 assert oos_re_propensity['mean'].iloc[0] > 0", "= x.copy() x_1 = pd.DataFrame(x_1) x_1.columns = ['var_'+str(x) for x", "= MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid,", "param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha =", "y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = True,", "== 1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0]) assert correct_tmts_1>TOLERANCE", "= ['var_'+str(x) for x in range(y.shape[1])] uplift_model_named = MRUplift() uplift_model_named.fit(x_1,", "dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha = [.5], copy_several_times", "y_1 = pd.DataFrame(y_1) y_1.columns = ['var_'+str(x) for x in range(y.shape[1])]", "= 0) masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility, missing_y_mat, masks,", "==1 ) def test_model_optim_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]])", "weights = prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape", "= np.array([x in [0] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]]", "1)], axis=1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1],", "uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape ==", "prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0],", "10000 param_grid = 
dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha", "param_grid, optimized_loss = True, use_propensity = True) varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights", "epochs=[30], batch_size=[100], alpha = [.5], copy_several_times = [2]) uplift_model_propensity =", "uplift_model_named = MRUplift() uplift_model_named.fit(x_1, y_1, t, param_grid = param_grid, n_jobs=1)", "true_ATE)**2)) < rmse_tolerance def test_model_get_random_erupts(self): true_ATE = np.array([[0, 0], [1,", "np.array([x in [0,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean()", "(num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1])) for q in", "batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise,", "test_model_pred_oos_shapes(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs) t", "= True) varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert", "= uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = False) optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x", "< rmse_tolerance def test_model_pred_oos_shapes(self): num_obs = 1000 y, x, t", "= np.unique(t, axis = 0) masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights,", "y, t = t) assert uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self): num_obs =", "uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1)", "optimized_loss = True, use_propensity = True) varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights =", "dropout=[.1], 
activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100], alpha = [.5], copy_several_times =", "y, t, param_grid = param_grid, n_jobs=1, optimized_loss = True, use_propensity", "np.array([[0, 0], [1, .5]]) rmse_tolerance = .05 num_obs = 10000", "= True) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs *", "MRUplift() uplift_model_named.fit(x_1, y_1, t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape", "use_propensity_score_cutoff = True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat =", "as pd class TestMRUplift(object): def test_get_t_data(self): num_obs_1 = 10 num_obs_2", "True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance def test_model_pred_oos_shapes(self): num_obs =", "param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise, x_no_noise,", "varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def", "= 0) uplift_model.best_params_net y_test, x_test, t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups", "= np.array([[0, 0], [1, .5]]) rmse_tolerance = .05 num_obs =", "y.shape[1]) assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x = x, y = y,", "== 0)[0]] ]).mean() correct_tmts_2 = np.array([x in [1,2] for x", "(missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000", "= [2]) uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1, 1), n_jobs=1,", "= 0) masks = np.ones(num_obs).reshape(num_obs,1) x, 
utility_weights, missing_utility, missing_y_mat, masks,", "tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = True, use_propensity", "assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs = 10000 TOLERANCE = .98", "t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100],", "assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE assert np.array_equal(optim_treatments_cuttoff_cat,optim_treatments_no_cuttoff_cat) is False", "uplift_model.best_params_net y_test, x_test, t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2", "= uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self):", "test_2 = get_t_data(np.array([0, 1]), num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1, 1) test_2_values", "TestMRUplift(object): def test_get_t_data(self): num_obs_1 = 10 num_obs_2 = 3 test_1", "= uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance def", "param_grid, n_jobs=1, optimized_loss = True, use_propensity = True) assert uplift_model.predict_ice().shape", "x, utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t, masks ,unique_treatments)", "import get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1 from mr_uplift.mr_uplift import MRUplift, get_t_data from", "= y, t = t) assert uplift_model_named.get_erupt_curves() def 
test_model_pred_oos_shapes_single_col_tmt(self): num_obs", "param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0],", "masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility, missing_y_mat, masks, weights =", "t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1", "True, test_size = 0) uplift_model.best_params_net y_test, x_test, t_test, rule_assignment_test =", "= pd.DataFrame(x_1) x_1.columns = ['var_'+str(x) for x in range(x.shape[1])] y_1", "= True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) < rmse_tolerance def test_model_get_random_erupts(self):", "0)[0]] ]).mean() correct_tmts_2 = np.array([x in [1,2] for x in", "x in range(x.shape[1])] y_1 = y.copy() y_1 = pd.DataFrame(y_1) y_1.columns", "in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean() correct_tmts_3 = np.array([x in [0,2]", "= get_t_data(np.array([0, 1]), num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1, 1) test_2_values =", "alpha = [.5], copy_several_times = [2]) uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x,", "optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean() correct_tmts_4 = np.array([x in [0] for", "3 test_1 = get_t_data(0, num_obs_1) test_2 = get_t_data(np.array([0, 1]), num_obs_2)", "t) assert uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000 y, x,", "weights = prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape ==", "in [0,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean() correct_tmts_4", "np.unique(t, axis = 0) masks = 
np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility,", "num_obs, y.shape[1]) assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x = x, y =", "uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000 y, x, t =", "oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance", "1)[0]] ]).mean() correct_tmts_3 = np.array([x in [0,2] for x in", "y_1.columns = ['var_'+str(x) for x in range(y.shape[1])] uplift_model_named = MRUplift()", "pandas as pd class TestMRUplift(object): def test_get_t_data(self): num_obs_1 = 10", "def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs)", "np.ones(num_obs).reshape(num_obs,1) x, utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t, masks", "= param_grid) varimp = uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8],", "optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis = 1)", "1), n_jobs=1, param_grid = param_grid, optimized_loss = True, use_propensity =", "in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]]", "1) == x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 ) ==", "def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs)", "== x_test[:,0]) assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE", "epochs=[1], 
batch_size=[100], alpha = [.5], copy_several_times = [1]) uplift_model =", "[.9999,.99], copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t,", "y, t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape == (", "= 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100],", "uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs", ".5, num_obs).reshape(-1, 1)], axis=1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'],", "<gh_stars>10-100 import numpy as np import pytest from mr_uplift.dataset.data_simulation import", "1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100], alpha", "= y, t = t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000", "0) masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility, missing_y_mat, masks, weights", "= 1) correct_tmts_1 = np.array([x in [0,1] for x in", "uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss =", "= prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape ==", "num_obs = 10000 TOLERANCE = .98 y, x, t, rule_assignment", "get_observational_uplift_data_1 from mr_uplift.mr_uplift import MRUplift, get_t_data from mr_uplift.keras_model_functionality import prepare_data_optimized_loss", "x.copy() x_1 = pd.DataFrame(x_1) x_1.columns = ['var_'+str(x) for x in", "False) 
optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = True) optim_treatments_cuttoff_cat", "= 1000 y, x, t = get_simple_uplift_data(num_obs) t = t.reshape(len(t),1)", "-true_ATE)**2)) < rmse_tolerance def test_model_pred_oos_shapes(self): num_obs = 1000 y, x,", "assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x", "import MRUplift, get_t_data from mr_uplift.keras_model_functionality import prepare_data_optimized_loss import sys import", "MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss", "0], [1, .5]]) rmse_tolerance = .05 num_obs = 10000 y_no_noise,", "= uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2],", "class TestMRUplift(object): def test_get_t_data(self): num_obs_1 = 10 num_obs_2 = 3", "10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y,", "for x in range(y.shape[1])] uplift_model_named = MRUplift() uplift_model_named.fit(x_1, y_1, t,", "optimized_loss = True) oos_re = uplift_model.get_random_erupts() uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise,", "y, x, t, rule_assignment = get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8], dropout=[.1],", "x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean() correct_tmts_3 = np.array([x in", "param_grid, n_jobs=1) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs *", "== 2)[0]] ]).mean() correct_tmts_4 = np.array([x in [0] for x", "MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer =", 
"num_layers=[2], epochs=[30], batch_size=[100], alpha = [.5], copy_several_times = [2]) y_no_noise,", "param_grid = param_grid, optimized_loss = True) oos_re = uplift_model.get_random_erupts() uplift_model_propensity", "= [2]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift()", "from mr_uplift.keras_model_functionality import prepare_data_optimized_loss import sys import pandas as pd", "1000 y, x, t = get_simple_uplift_data(num_obs) t = t.reshape(len(t),1) unique_treatments", "-999)).mean() ==1 ) def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000 y, x,", "axis=1) assert np.mean(test_1 == test_1_values) == 1 assert np.mean(test_2 ==", "np.array([x in [1,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean()", "x, y = y, t = t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs", "1) correct_tmts_1 = np.array([x in [0,1] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups", "oos_re['mean'].iloc[0] > 0 assert oos_re_propensity['mean'].iloc[0] > 0 def test_varimp(self): num_obs", "= np.zeros(num_obs_1).reshape(-1, 1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)], axis=1)", "= False) oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -", "= t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000 y, x, t", "dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512], alpha = [.9999,.99], copy_several_times", "[1]) uplift_model = MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid = param_grid,", "y = y, t = t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs =", "= [1]) uplift_model = MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid =", "= t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], 
num_layers=[1],", "test_1_values = np.zeros(num_obs_1).reshape(-1, 1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)],", "t = t.reshape(len(t),1) unique_treatments = np.unique(t, axis = 0) masks", "t, param_grid = param_grid, n_jobs=1) x_1 = x.copy() x_1 =", "alpha = [.5], copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x,", "MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1, optimized_loss =", "in range(x.shape[1])] y_1 = y.copy() y_1 = pd.DataFrame(y_1) y_1.columns =", "num_layers=[1], epochs=[20], batch_size=[512], alpha = [.9999,.99], copy_several_times = [1]) uplift_model", "uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1, optimized_loss = True,", "pd.DataFrame(y_1) y_1.columns = ['var_'+str(x) for x in range(y.shape[1])] uplift_model_named =", "True, use_propensity = True) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0],", "for q in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean()", "(num_obs, unique_treatments.shape[0], y.shape[1])) for q in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) ==", "uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1,", "= dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512], alpha = [.9999,.99],", "correct_tmts_1 = np.array([x in [0,1] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups ==", "in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean() correct_tmts_4 = np.array([x in [0]", "y, x, t = get_simple_uplift_data(num_obs) t = t.reshape(len(t),1) unique_treatments =", "get_t_data(np.array([0, 1]), num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1, 1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1,", "== -999)).mean() ==1 ) def 
test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000 y,", "x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0]) assert", "tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = True) oos_re", "n_jobs=1) assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs * .7,", "varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs = 10000 TOLERANCE =", "== 1) == x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 )", "uplift_model.get_random_erupts() uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid", "= np.array([x in [1,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]]", "= [.5], copy_several_times = [2]) uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x, y,", "experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]]", "]).mean() correct_tmts_4 = np.array([x in [0] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups", "0], [1, .5]]) rmse_tolerance = .05 num_obs = 10000 param_grid", "num_layers=[2], epochs=[30], batch_size=[100], alpha = [.5], copy_several_times = [2]) uplift_model_propensity", "x_test, use_propensity_score_cutoff = False) optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff", "uplift_model.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid) varimp =", "= True, use_propensity = True) varimp_propensity = 
uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1))", "= prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs,", "np.mean(test_1 == test_1_values) == 1 assert np.mean(test_2 == test_2_values) ==", "def test_model_pred_oos_shapes_single_col_tmt(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs)", "False) oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2))", "0 def test_varimp(self): num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1],", "get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0", "[1, .5]]) rmse_tolerance = .05 num_obs = 10000 y_no_noise, x_no_noise,", "t.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss = True, use_propensity", "assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) < rmse_tolerance def test_model_get_random_erupts(self): true_ATE =", "unique_treatments = np.unique(t, axis = 0) masks = np.ones(num_obs).reshape(num_obs,1) x,", "[1]) uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid,", "dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000]) uplift_model = MRUplift() uplift_model.fit(x,", "rmse_tolerance def test_model_pred_oos_shapes(self): num_obs = 1000 y, x, t =", "optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1 = np.array([x in [0,1]", "[0,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean() correct_tmts_4 =", "assert np.mean(test_2 == test_2_values) == 1 def test_model_mean_outputs(self): true_ATE =", "= 1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1 = 
np.array([x", "range(y.shape[1])] uplift_model_named = MRUplift() uplift_model_named.fit(x_1, y_1, t, param_grid = param_grid,", "uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid = param_grid, n_jobs=1, optimized_loss = True,", "((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs", "np.array([x in [0] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean()", "True) varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1]", "use_propensity = True, test_size = 0) uplift_model.best_params_net y_test, x_test, t_test,", "['var_'+str(x) for x in range(x.shape[1])] y_1 = y.copy() y_1 =", "= optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1 = np.array([x in [0,1] for", "= t.reshape(len(t),1) unique_treatments = np.unique(t, axis = 0) masks =", "uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid =", "optimized_loss = False) oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1)", "10 num_obs_2 = 3 test_1 = get_t_data(0, num_obs_1) test_2 =", "uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, optimized_loss =", "= MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid = param_grid, n_jobs=1, optimized_loss", "x in range(y.shape[1])] uplift_model_named = MRUplift() uplift_model_named.fit(x_1, y_1, t, param_grid", "optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean() correct_tmts_2 = np.array([x in [1,2] for", "epochs=[30], batch_size=[100]) y, x, t = 
get_simple_uplift_data(num_obs) uplift_model = MRUplift()", "= True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2)) < rmse_tolerance def test_model_pred_oos_shapes(self): num_obs", "[2]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise,", "t = t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2],", "batch_size=[100]) y, x, t = get_simple_uplift_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x,", "test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs) t", ") def test_model_optim_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance", "= get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1,", "1]), num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1, 1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1),", "dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs)", "= MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid,", "np.unique(t, axis = 0) masks = np.ones(num_obs).reshape(num_obs,1) x, utility_weights, missing_utility,", "for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]] ]).mean() correct_tmts_4 = np.array([x", "true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance = .05 num_obs", "get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid", "> 0 def test_varimp(self): num_obs = 10000 param_grid = dict(num_nodes=[8],", "t, param_grid = param_grid, n_jobs=1, optimized_loss = True, use_propensity =", "mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data, 
get_observational_uplift_data_1 from mr_uplift.mr_uplift import MRUplift, get_t_data", "epochs=[30], batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift()", "rule_assignment = get_observational_uplift_data_1(num_obs) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20],", "1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0]) assert correct_tmts_1>TOLERANCE assert", ") def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000 y, x, t =", "t.reshape(-1, 1), n_jobs=1, param_grid = param_grid) varimp = uplift_model.permutation_varimp(objective_weights =", "epochs=[20], batch_size=[512], alpha = [.9999,.99], copy_several_times = [1]) uplift_model =", "optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ==", "1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert np.mean(test_1", "y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1])) for q in range(unique_treatments.shape[0]):", "True) oos_re = uplift_model.get_random_erupts() uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1,", "y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer = True) assert", "param_grid) varimp = uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8], dropout=[.1],", "10000 y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise,", "assert uplift_model.get_erupt_curves(x = x, y = y, t = t)", "utility_weights, missing_utility, missing_y_mat, masks, weights = 
prepare_data_optimized_loss(x,y,t, masks ,unique_treatments) assert(utility_weights.shape", "[.5], copy_several_times = [2]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model", "n_jobs=1, param_grid = param_grid, optimized_loss = True) oos_re = uplift_model.get_random_erupts()", "== 1 assert np.mean(test_2 == test_2_values) == 1 def test_model_mean_outputs(self):", "y_test, x_test, t_test, rule_assignment_test = get_observational_uplift_data_1(num_obs) experiment_groups = np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]]", "get_t_data from mr_uplift.keras_model_functionality import prepare_data_optimized_loss import sys import pandas as", "= [.5], copy_several_times = [2]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs)", "param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512], alpha =", "import pytest from mr_uplift.dataset.data_simulation import get_no_noise_data, get_simple_uplift_data, get_observational_uplift_data_1 from mr_uplift.mr_uplift", "np.ones(num_obs_2).reshape(-1, 1)], axis=1) assert np.mean(test_1 == test_1_values) == 1 assert", "in [0,1] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean() correct_tmts_2", "= pd.DataFrame(y_1) y_1.columns = ['var_'+str(x) for x in range(y.shape[1])] uplift_model_named", "x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 3)[0]] ]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups ==", "test_model_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance = .05", "- true_ATE)**2)) < rmse_tolerance def test_model_get_random_erupts(self): true_ATE = np.array([[0, 0],", "= True, test_size = 0) uplift_model.best_params_net y_test, x_test, t_test, rule_assignment_test", "MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid = param_grid, 
optimized_loss", "param_grid, optimized_loss = False) oos_ice = uplift_model.predict_ice(response_transformer = True) assert", "from mr_uplift.mr_uplift import MRUplift, get_t_data from mr_uplift.keras_model_functionality import prepare_data_optimized_loss import", "= 10 num_obs_2 = 3 test_1 = get_t_data(0, num_obs_1) test_2", "= x_test, use_propensity_score_cutoff = False) optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x = x_test,", "= np.array([x in [0,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 2)[0]]", "test_get_t_data(self): num_obs_1 = 10 num_obs_2 = 3 test_1 = get_t_data(0,", "param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1], batch_size=[1000]) uplift_model", "alpha = [.5], copy_several_times = [2]) y_no_noise, x_no_noise, tmt_no_noise =", "t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=[ 'relu'], num_layers=[1], epochs=[1],", "num_layers=[2], epochs=[30], batch_size=[100]) y, x, t = get_simple_uplift_data(num_obs) uplift_model =", "== 1)[0]] ]).mean() correct_tmts_3 = np.array([x in [0,2] for x", "pd class TestMRUplift(object): def test_get_t_data(self): num_obs_1 = 10 num_obs_2 =", "1), n_jobs=1, param_grid = param_grid, optimized_loss = False) oos_ice =", "num_obs).reshape(-1, 1)], axis=1) unique_treatments = np.unique(t, axis = 0) masks", "0 assert oos_re_propensity['mean'].iloc[0] > 0 def test_varimp(self): num_obs = 10000", "axis = 0) masks = np.ones(num_obs*len(unique_treatments)).reshape(num_obs,len(unique_treatments)) x, utility_weights, missing_utility, missing_y_mat,", "t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape == ( np.unique(t,", "dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise =", "= np.array([x in [0,1] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]]", "3 
optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = False) optim_treatments_cuttoff", "in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1 )", "= uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = True) optim_treatments_cuttoff_cat = optim_treatments_cuttoff.argmax(axis", "correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE assert np.array_equal(optim_treatments_cuttoff_cat,optim_treatments_no_cuttoff_cat) is False assert", "uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1) x_1 = x.copy()", "MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape", "assert oos_re['mean'].iloc[0] > 0 assert oos_re_propensity['mean'].iloc[0] > 0 def test_varimp(self):", "= 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff =", "for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean() correct_tmts_3 = np.array([x", "masks ,unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0],", "= MRUplift() uplift_model_named.fit(x_1, y_1, t, param_grid = param_grid, n_jobs=1) assert", "param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[1], batch_size=[100], alpha =", "experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff =", "optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] ]).mean() correct_tmts_3 = np.array([x in [0,2] for", "batch_size=[1000]) uplift_model = MRUplift() uplift_model.fit(x, y, t, param_grid = param_grid,", "num_obs_1 = 10 num_obs_2 = 3 test_1 = 
get_t_data(0, num_obs_1)", "experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x =", "== x_test[np.where(experiment_groups == 1)[0],0]).mean() correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0])", "test_model_propensity(self): num_obs = 10000 TOLERANCE = .98 y, x, t,", "masks = np.ones(num_obs).reshape(num_obs,1) x, utility_weights, missing_utility, missing_y_mat, masks, weights =", "param_grid = param_grid, n_jobs=1) x_1 = x.copy() x_1 = pd.DataFrame(x_1)", "t = get_simple_uplift_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x, y, t.reshape(-1, 1),", "= 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100])", "mr_uplift.keras_model_functionality import prepare_data_optimized_loss import sys import pandas as pd class", "prepare_data_optimized_loss import sys import pandas as pd class TestMRUplift(object): def", "1), n_jobs=1) oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) -true_ATE)**2))", "assert uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs * .7, y.shape[1])", ",unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1]))", "dropout=[.1], activation=['relu'], num_layers=[1], epochs=[20], batch_size=[512], alpha = [.9999,.99], copy_several_times =", "= param_grid, optimized_loss = False) oos_ice = uplift_model.predict_ice(response_transformer = True)", "= np.zeros(num_obs)+2 experiment_groups[np.where(x_test[:,-2]<.5)[0]] = 1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] =", "use_propensity_score_cutoff = False) optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, 
use_propensity_score_cutoff =", "1000 y, x, t = get_simple_uplift_data(num_obs) t = t.reshape(-1, 1)", "x, utility_weights, missing_utility, missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape", "= True) oos_re = uplift_model.get_random_erupts() uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise,", "optimized_loss = True, use_propensity = True) oos_re_propensity = uplift_model_propensity.get_random_erupts() assert", "]).mean() correct_tmts_experiment_groups_1 = ((optim_treatments_cuttoff_cat[np.where(experiment_groups == 1)[0]] == 1) == x_test[np.where(experiment_groups", "np.array([.7,-.3,0]).reshape(1,-1)) assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs = 10000", "num_obs = 10000 y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model =", "np.unique(t, axis=0).shape[0], num_obs * .7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0],", "unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1]))", "= uplift_model.get_random_erupts() uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1,", "np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0]) assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE", "test_1 = get_t_data(0, num_obs_1) test_2 = get_t_data(np.array([0, 1]), num_obs_2) test_1_values", "param_grid = param_grid) varimp = uplift_model.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) param_grid =", "'relu'], num_layers=[1], epochs=[1], 
batch_size=[1000]) uplift_model = MRUplift() uplift_model.fit(x, y, t,", "np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) unique_treatments = np.unique(t,", "import pandas as pd class TestMRUplift(object): def test_get_t_data(self): num_obs_1 =", "y_1, t, param_grid = param_grid, n_jobs=1) assert uplift_model.predict_ice().shape == (", "== test_1_values) == 1 assert np.mean(test_2 == test_2_values) == 1", "t = get_simple_uplift_data(num_obs) t = t.reshape(len(t),1) unique_treatments = np.unique(t, axis", "correct_tmts_no_cutoff = np.mean((optim_treatments_no_cuttoff_cat==1 ) == x_test[:,0]) assert correct_tmts_1>TOLERANCE assert correct_tmts_2>TOLERANCE", "== (np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x = x,", "q in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1", "num_obs_2) test_1_values = np.zeros(num_obs_1).reshape(-1, 1) test_2_values = np.concatenate([np.zeros(num_obs_2).reshape(-1, 1), np.ones(num_obs_2).reshape(-1,", "1), n_jobs=1, param_grid = param_grid, optimized_loss = True) oos_re =", "n_jobs=1, optimized_loss = True, use_propensity = True) assert uplift_model.predict_ice().shape ==", "uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x = x, y = y, t =", "[2]) uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid", "['var_'+str(x) for x in range(y.shape[1])] uplift_model_named = MRUplift() uplift_model_named.fit(x_1, y_1,", "True) oos_re_propensity = uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] > 0 assert oos_re_propensity['mean'].iloc[0]", "num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs) t =", "optim_treatments_cuttoff.argmax(axis = 1) optim_treatments_no_cuttoff_cat = optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1 =", 
"uplift_model = MRUplift() uplift_model.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid =", "rmse_tolerance = .05 num_obs = 10000 y_no_noise, x_no_noise, tmt_no_noise =", ".05 num_obs = 10000 param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2],", "x, y = y, t = t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs", "y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise,", "assert correct_tmts_2>TOLERANCE assert correct_tmts_3>TOLERANCE assert correct_tmts_4>TOLERANCE assert correct_tmts_experiment_groups_1>TOLERANCE assert np.array_equal(optim_treatments_cuttoff_cat,optim_treatments_no_cuttoff_cat)", "x_1.columns = ['var_'+str(x) for x in range(x.shape[1])] y_1 = y.copy()", "num_layers=[2], epochs=[1], batch_size=[100], alpha = [.5], copy_several_times = [1]) uplift_model", "num_obs_2 = 3 test_1 = get_t_data(0, num_obs_1) test_2 = get_t_data(np.array([0,", "= np.array([.7,-.3,0]).reshape(1,-1)) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100],", "uplift_model.predict_ice().shape == ( np.unique(t, axis=0).shape[0], num_obs * .7, y.shape[1]) assert", "> 0 assert oos_re_propensity['mean'].iloc[0] > 0 def test_varimp(self): num_obs =", "def test_model_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance =", "import sys import pandas as pd class TestMRUplift(object): def test_get_t_data(self):", "num_layers=[2], epochs=[30], batch_size=[100]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model =", "test_1_values) == 1 assert np.mean(test_2 == test_2_values) == 1 def", ".7, y.shape[1]) assert uplift_model.predict_ice(x=x).shape == (np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert uplift_model.get_erupt_curves()", "TOLERANCE = .98 y, x, t, rule_assignment = get_observational_uplift_data_1(num_obs) param_grid", "= 
param_grid, n_jobs=1, optimized_loss = True, use_propensity = True) assert", "oos_ice = uplift_model.predict_ice(response_transformer = True) assert np.sqrt(np.mean((oos_ice.mean(axis=1) - true_ATE)**2)) <", ".5]]) rmse_tolerance = .05 num_obs = 10000 param_grid = dict(num_nodes=[8],", "t) def test_model_pred_oos_shapes_single_col_tmt_propensity(self): num_obs = 1000 y, x, t =", "def test_model_pred_oos_shapes(self): num_obs = 1000 y, x, t = get_simple_uplift_data(num_obs)", "= MRUplift() uplift_model.fit(x, y, t.reshape(-1, 1), n_jobs=1, param_grid = param_grid)", "t = t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000 y, x,", "= False) optim_treatments_cuttoff = uplift_model.predict_optimal_treatments(x = x_test, use_propensity_score_cutoff = True)", "oos_re = uplift_model.get_random_erupts() uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1),", "y = y, t = t) assert uplift_model_named.get_erupt_curves() def test_model_pred_oos_shapes_single_col_tmt(self):", "= dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha = [.5],", "x, t = get_simple_uplift_data(num_obs) t = np.concatenate([t.reshape(-1, 1), np.random.binomial(1, .5,", "param_grid = param_grid, optimized_loss = True, use_propensity = True) oos_re_propensity", "(np.unique(t,axis=0).shape[0], num_obs, y.shape[1]) assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x = x, y", "assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] == -999)).mean() ==1 ) def test_model_optim_mean_outputs(self):", "= True, use_propensity = True, test_size = 0) uplift_model.best_params_net y_test,", "assert varimp['permutation_varimp_metric'].iloc[0]>varimp['permutation_varimp_metric'].iloc[1] assert varimp_propensity['permutation_varimp_metric'].iloc[0]>varimp_propensity['permutation_varimp_metric'].iloc[1] def test_model_propensity(self): num_obs = 10000 
TOLERANCE", "== 1 def test_model_mean_outputs(self): true_ATE = np.array([[0, 0], [1, .5]])", "= .05 num_obs = 10000 y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs)", "get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1) oos_ice", "range(x.shape[1])] y_1 = y.copy() y_1 = pd.DataFrame(y_1) y_1.columns = ['var_'+str(x)", "= t.reshape(-1, 1) param_grid = dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], epochs=[1],", "uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] > 0 assert oos_re_propensity['mean'].iloc[0] > 0 def", "alpha = [.9999,.99], copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x,", "= .98 y, x, t, rule_assignment = get_observational_uplift_data_1(num_obs) param_grid =", ".5]]) rmse_tolerance = .05 num_obs = 10000 y_no_noise, x_no_noise, tmt_no_noise", "= y, t = t) def test_prepare_data_optimized_loss_one_col_tmt(self): num_obs = 1000", "1)], axis=1) assert np.mean(test_1 == test_1_values) == 1 assert np.mean(test_2", "x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean() correct_tmts_2 = np.array([x in", "== (num_obs, y.shape[1])) assert(missing_y_mat.shape == (num_obs, unique_treatments.shape[0], y.shape[1])) for q", "n_jobs=1, param_grid = param_grid, optimized_loss = False) oos_ice = uplift_model.predict_ice(response_transformer", "copy_several_times = [1]) uplift_model = MRUplift() uplift_model.fit(x, y[:,0].reshape(-1,1), t, param_grid", "correct_tmts_2 = np.array([x in [1,2] for x in optim_treatments_cuttoff_cat[np.where(experiment_groups ==", "in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean() correct_tmts_2 = np.array([x in [1,2]", "-999)).mean() ==1 ) def test_model_optim_mean_outputs(self): true_ATE = np.array([[0, 0], [1,", "assert uplift_model.get_erupt_curves() assert uplift_model.get_erupt_curves(x = x, y = y, t", "sys import 
pandas as pd class TestMRUplift(object): def test_get_t_data(self): num_obs_1", "activation=['relu'], num_layers=[2], epochs=[30], batch_size=[100], alpha = [.5], copy_several_times = [2])", "optim_treatments_no_cuttoff.argmax(axis = 1) correct_tmts_1 = np.array([x in [0,1] for x", "= True, use_propensity = True) assert uplift_model.predict_ice().shape == ( np.unique(t,", "copy_several_times = [2]) y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model =", "==1 ) def test_prepare_data_optimized_loss_two_col_tmt(self): num_obs = 1000 y, x, t", "1 experiment_groups[np.where(x_test[:,-2]<.33)[0]] = 0 experiment_groups[np.where(x_test[:,-1]>.8)[0]] = 3 optim_treatments_no_cuttoff = uplift_model.predict_optimal_treatments(x", "rmse_tolerance def test_model_get_random_erupts(self): true_ATE = np.array([[0, 0], [1, .5]]) rmse_tolerance", "= 10000 y_no_noise, x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift()", "use_propensity = True) oos_re_propensity = uplift_model_propensity.get_random_erupts() assert oos_re['mean'].iloc[0] > 0", "for x in optim_treatments_cuttoff_cat[np.where(experiment_groups == 0)[0]] ]).mean() correct_tmts_2 = np.array([x", "True, use_propensity = True) varimp_propensity = uplift_model_propensity.permutation_varimp(objective_weights = np.array([.7,-.3,0]).reshape(1,-1)) assert", "param_grid, optimized_loss = True) oos_re = uplift_model.get_random_erupts() uplift_model_propensity = MRUplift()", "== (num_obs, unique_treatments.shape[0], y.shape[1])) for q in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0)", "missing_y_mat, masks, weights = prepare_data_optimized_loss(x,y,t,masks, unique_treatments) assert(utility_weights.shape == (num_obs, y.shape[1]))", "uplift_model_propensity = MRUplift() uplift_model_propensity.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1, 1), n_jobs=1, param_grid =", "dict(num_nodes=[8], dropout=[.1], activation=['relu'], num_layers=[2], 
epochs=[30], batch_size=[100]) y, x, t =", "y.shape[1])) for q in range(unique_treatments.shape[0]): assert( ((missing_utility[:,q]==0) == (missing_y_mat[:,q,0] ==", "np.random.binomial(1, .5, num_obs).reshape(-1, 1)], axis=1) unique_treatments = np.unique(t, axis =", "= np.unique(t, axis = 0) masks = np.ones(num_obs).reshape(num_obs,1) x, utility_weights,", "get_simple_uplift_data(num_obs) t = t.reshape(len(t),1) unique_treatments = np.unique(t, axis = 0)", "< rmse_tolerance def test_model_get_random_erupts(self): true_ATE = np.array([[0, 0], [1, .5]])", "x_no_noise, tmt_no_noise = get_no_noise_data(num_obs) uplift_model = MRUplift() uplift_model.fit(x_no_noise, y_no_noise, tmt_no_noise.reshape(-1," ]
[ "y0 = increment(x0) #y0 == 1 self.assertEqual(y0, 1) x1 =", "from example_module import COLORS, increment class ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\"", "#TODO \"\"\" def test_increment(self): x0 = 0 y0 = increment(x0)", "= 0 y0 = increment(x0) #y0 == 1 self.assertEqual(y0, 1)", "\"\"\" #TODO \"\"\" def test_increment(self): x0 = 0 y0 =", "== 1 self.assertEqual(y0, 1) x1 = 100 y1 = increment(x1)", "x1 = 100 y1 = increment(x1) #y1 == 101 self.assertEqual(y1,", "example_module import COLORS, increment class ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\" def", "import COLORS, increment class ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\" def test_increment(self):", "ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\" def test_increment(self): x0 = 0 y0", "\"\"\" \"\"\" import unittest from example_module import COLORS, increment class", "\"\"\" import unittest from example_module import COLORS, increment class ExampleTest(unittest.TestCase):", "0 y0 = increment(x0) #y0 == 1 self.assertEqual(y0, 1) x1", "class ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\" def test_increment(self): x0 = 0", "\"\"\" def test_increment(self): x0 = 0 y0 = increment(x0) #y0", "self.assertEqual(y0, 1) x1 = 100 y1 = increment(x1) #y1 ==", "1) x1 = 100 y1 = increment(x1) #y1 == 101", "x0 = 0 y0 = increment(x0) #y0 == 1 self.assertEqual(y0,", "test_increment(self): x0 = 0 y0 = increment(x0) #y0 == 1", "def test_increment(self): x0 = 0 y0 = increment(x0) #y0 ==", "increment class ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\" def test_increment(self): x0 =", "1 self.assertEqual(y0, 1) x1 = 100 y1 = increment(x1) #y1", "= increment(x0) #y0 == 1 self.assertEqual(y0, 1) x1 = 100", "import unittest from example_module import COLORS, increment class ExampleTest(unittest.TestCase): \"\"\"", "#y0 == 1 self.assertEqual(y0, 1) x1 = 100 y1 =", "= 100 y1 = increment(x1) #y1 == 101 self.assertEqual(y1, 101)", "increment(x0) #y0 == 1 self.assertEqual(y0, 1) x1 = 100 y1", 
"unittest from example_module import COLORS, increment class ExampleTest(unittest.TestCase): \"\"\" #TODO", "COLORS, increment class ExampleTest(unittest.TestCase): \"\"\" #TODO \"\"\" def test_increment(self): x0" ]
[ "from django.utils.translation import ugettext as _ from desktop.auth import forms", "+ request.get_host() + '/login/oauth_authenticated/' })) if resp['status'] != '200': raise", "is_first_login_ever = first_login_ever() backend_names = get_backend_names() is_active_directory = 'LdapBackend' in", "exc_info=e) request.error(_('Could not create home directory.')) if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user',", "'first_login_ever': is_first_login_ever, 'login_errors': request.method == 'POST', 'backend_names': backend_names, 'active_directory': is_active_directory", "oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer) resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\",", "current_users def first_login_ever(): backends = get_backends() for backend in backends:", "this work for additional information # regarding copyright ownership. Cloudera,", "if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends = ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if is_first_login_ever", "more contributor license agreements. See the NOTICE file # distributed", "import ensure_home_directory, require_change_password LOG = logging.getLogger(__name__) def get_current_users(): \"\"\"Return dictionary", "response from OAuth provider: %s\") % resp) request.session['request_token'] = dict(cgi.parse_qsl(content))", "request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': True}) else: return HttpResponseRedirect(redirect_to)", "# For first login, need to validate user info! 
first_user_form", "backend.logout(request, next_page) if response: return response return django.contrib.auth.views.logout(request, next_page) def", "login(request, user) ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to) if not from_modal: request.session.set_test_cookie()", "profile(request): \"\"\" Dumps JSON for user-profile information. \"\"\" return render(None,", "last_name=user.last_name, last_login=str(user.last_login), # datetime object needs to be converted email=user.email)", "@login_notrequired def oauth_login(request): assert oauth is not None consumer =", "2.0 (the # \"License\"); you may not use this file", "None auth_form = AuthenticationForm() if DEMO_ENABLED.get() and not 'admin' in", "based on Twitter as example. @login_notrequired def oauth_login(request): assert oauth", "{ 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or auth_form, 'next': redirect_to, 'first_login_ever':", "from django.core.exceptions import SuspiciousOperation from django.contrib.auth import login, get_backends, authenticate", "\"\"\"Log out the user\"\"\" username = request.user.get_username() request.audit = {", "cgi import logging import urllib from datetime import datetime from", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "regarding copyright ownership. Cloudera, Inc. 
licenses this file # to", "userprofile.save() msg = 'Successful login for user: %s' % user.username", "from desktop.conf import LDAP, OAUTH, DEMO_ENABLED from hadoop.fs.exceptions import WebHdfsException", "a dictionary of the user's IP address and last access", "of the user's IP address and last access time\"\"\" current_users", "not create home directory.'), exc_info=e) request.error(_('Could not create home directory.'))", "object needs to be converted email=user.email) # OAuth is based", "request.POST.get('username') } # For first login, need to validate user", "'/login/oauth_authenticated/' })) if resp['status'] != '200': raise Exception(_(\"Invalid response from", "from OAuth provider: %s\") % resp) request.session['request_token'] = dict(cgi.parse_qsl(content)) url", "get_backends() for backend in backends: if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever():", "userobj = User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username, { }) except User.DoesNotExist:", "not from_modal: request.session.set_test_cookie() renderable_path = 'login.mako' if from_modal: renderable_path =", "= authenticate(username=request.user.username, password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to) if", "or None first_user = first_user_form and first_user_form.is_valid() if first_user or", "backend_names, 'active_directory': is_active_directory }) def dt_logout(request, next_page=None): \"\"\"Log out the", "if DEMO_ENABLED.get() and not 'admin' in request.REQUEST: user = authenticate(username=request.user.username,", "resp) access_token = dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token) login(request, user) redirect_to", "if not from_modal: request.session.set_test_cookie() renderable_path = 'login.mako' if from_modal: renderable_path", "HttpResponseRedirect from django.utils.translation import ugettext as _ from 
desktop.auth import", "import forms as auth_forms from desktop.lib.django_util import render from desktop.lib.django_util", "client = oauth.Client(consumer, token) resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if", "distributed with this work for additional information # regarding copyright", "= oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer,", "consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client =", "address and last access time\"\"\" current_users = { } for", "the user\"\"\" username = request.user.get_username() request.audit = { 'username': username,", "= oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer) resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(),", "user: %s' % request.POST.get('username') request.audit['operationText'] = msg access_warn(request, msg) if", "request.audit = { 'operation': 'USER_LOGIN', 'username': request.POST.get('username') } # For", "access_warn(request, msg) if from_modal or request.REQUEST.get('fromModal', 'false') == 'true': return", "'200': raise Exception(_(\"Invalid response from OAuth provider: %s\") % resp)", "get_backends() if backends: for backend in backends: if hasattr(backend, 'logout'):", "import User from django.contrib.sessions.models import Session from django.http import HttpResponseRedirect", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "try: import oauth2 as oauth except: oauth = None import", "additional information # regarding copyright ownership. Cloudera, Inc. 
licenses this", "= AuthenticationForm() if DEMO_ENABLED.get() and not 'admin' in request.REQUEST: user", "userprofile.last_activity = datetime.now() userprofile.save() msg = 'Successful login for user:", "from django.contrib.auth import login, get_backends, authenticate from django.contrib.auth.models import User", "django.core import urlresolvers from django.core.exceptions import SuspiciousOperation from django.contrib.auth import", "try: ensure_home_directory(request.fs, user.username) except (IOError, WebHdfsException), e: LOG.error(_('Could not create", "= oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer, token) resp, content =", "'USER_LOGOUT', 'operationText': 'Logged out user: %s' % username } backends", "auto_create_home_backends): # Create home directory for first user. try: ensure_home_directory(request.fs,", "under the License is distributed on an \"AS IS\" BASIS,", "login, get_backends, authenticate from django.contrib.auth.models import User from django.contrib.sessions.models import", "file # distributed with this work for additional information #", "True return False def get_backend_names(): return get_backends and [backend.__class__.__name__ for", "# Licensed to Cloudera, Inc. under one # or more", "the AuthenticationForm. # It provides 'backends' on the User object.", "License for the specific language governing permissions and # limitations", "Create home directory for first user. 
try: ensure_home_directory(request.fs, user.username) except", "@login_notrequired def oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'],", "== 'POST', 'backend_names': backend_names, 'active_directory': is_active_directory }) def dt_logout(request, next_page=None):", "and last access time\"\"\" current_users = { } for session", "with id=%d does not exist\" % uid) return current_users def", "ensure_home_directory, require_change_password LOG = logging.getLogger(__name__) def get_current_users(): \"\"\"Return dictionary of", "authenticate from django.contrib.auth.models import User from django.contrib.sessions.models import Session from", "auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm else: UserCreationForm = auth_forms.UserCreationForm AuthenticationForm =", "urllib from datetime import datetime from axes.decorators import watch_login import", "= session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: # If secret_key changed, this resolution", "'true': return JsonResponse({'auth': True}) else: return HttpResponseRedirect(redirect_to) else: request.audit['allowed'] =", "bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm", "from hadoop.fs.exceptions import WebHdfsException from useradmin.models import get_profile from useradmin.views", "request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends = ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if is_first_login_ever or", "useradmin.models import get_profile from useradmin.views import ensure_home_directory, require_change_password LOG =", "the License. 
try: import oauth2 as oauth except: oauth =", "if is_first_login_ever or any(backend in backend_names for backend in auto_create_home_backends):", "if from_modal or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': True})", "import logging import urllib from datetime import datetime from axes.decorators", "UserCreationForm(data=request.POST) or None first_user = first_user_form and first_user_form.is_valid() if first_user", "request.REQUEST: user = authenticate(username=request.user.username, password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs, user.username) return", "to you under the Apache License, Version 2.0 (the #", "OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer) resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({", "uid) return current_users def first_login_ever(): backends = get_backends() for backend", "on the User object. user = auth_form.get_user() userprofile = get_profile(user)", "userprofile = get_profile(user) login(request, user) if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends =", "object. user = auth_form.get_user() userprofile = get_profile(user) login(request, user) if", "password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to) if not from_modal:", "= 'login.mako' if from_modal: renderable_path = 'login_modal.mako' return render(renderable_path, request,", "Licensed to Cloudera, Inc. under one # or more contributor", "or bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm =", "information. 
\"\"\" return render(None, request, _profile_dict(request.user)) def _profile_dict(user): return dict(", "\"GET\") if resp['status'] != '200': raise Exception(_(\"Invalid response from OAuth", "may not use this file except in compliance # with", "desktop.lib.django_util import render from desktop.lib.django_util import login_notrequired from desktop.lib.django_util import", "software # distributed under the License is distributed on an", "{ } for session in Session.objects.all(): try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY)", "governing permissions and # limitations under the License. try: import", "backend_names = get_backend_names() is_active_directory = 'LdapBackend' in backend_names and (", "not is_first_login_ever: auth_form = AuthenticationForm(data=request.POST) if auth_form.is_valid(): # Must login", "if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever(): return True return False def", "== 'true': return JsonResponse({'auth': False}) else: first_user_form = None auth_form", "first_login_ever(): backends = get_backends() for backend in backends: if hasattr(backend,", "License, Version 2.0 (the # \"License\"); you may not use", "backend in auto_create_home_backends): # Create home directory for first user.", "for user-profile information. \"\"\" return render(None, request, _profile_dict(request.user)) def _profile_dict(user):", "is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm else: UserCreationForm =", "LOG.debug(\"User with id=%d does not exist\" % uid) return current_users", "else: first_user_form = None auth_form = AuthenticationForm() if DEMO_ENABLED.get() and", "is based on Twitter as example. 
@login_notrequired def oauth_login(request): assert", "of User objects and a dictionary of the user's IP", "first_user = first_user_form and first_user_form.is_valid() if first_user or not is_first_login_ever:", "def get_current_users(): \"\"\"Return dictionary of User objects and a dictionary", "and a dictionary of the user's IP address and last", "is_active_directory = 'LdapBackend' in backend_names and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get())", "else: return HttpResponseRedirect(redirect_to) else: request.audit['allowed'] = False msg = 'Failed", "# datetime object needs to be converted email=user.email) # OAuth", "datetime from axes.decorators import watch_login import django.contrib.auth.views from django.core import", "using the AuthenticationForm. # It provides 'backends' on the User", "'login.mako' if from_modal: renderable_path = 'login_modal.mako' return render(renderable_path, request, {", "user\"\"\" username = request.user.get_username() request.audit = { 'username': username, 'operation':", "backend_names for backend in auto_create_home_backends): # Create home directory for", "watch_login import django.contrib.auth.views from django.core import urlresolvers from django.core.exceptions import", "current_users[userobj] = last_access_map.get(userobj.username, { }) except User.DoesNotExist: LOG.debug(\"User with id=%d", "OAuth provider: %s\") % resp) request.session['request_token'] = dict(cgi.parse_qsl(content)) url =", "See the NOTICE file # distributed with this work for", "backend.is_first_login_ever(): return True return False def get_backend_names(): return get_backends and", "user-profile information. \"\"\" return render(None, request, _profile_dict(request.user)) def _profile_dict(user): return", "to validate user info! 
first_user_form = is_first_login_ever and UserCreationForm(data=request.POST) or", "%s' % request.POST.get('username') request.audit['operationText'] = msg access_warn(request, msg) if from_modal", "import login_notrequired from desktop.lib.django_util import JsonResponse from desktop.log.access import access_warn,", "desktop.conf import LDAP, OAUTH, DEMO_ENABLED from hadoop.fs.exceptions import WebHdfsException from", "ugettext as _ from desktop.auth import forms as auth_forms from", "response: return response return django.contrib.auth.views.logout(request, next_page) def profile(request): \"\"\" Dumps", "django.contrib.auth.views.logout(request, next_page) def profile(request): \"\"\" Dumps JSON for user-profile information.", "+ '/login/oauth_authenticated/' })) if resp['status'] != '200': raise Exception(_(\"Invalid response", "Apache License, Version 2.0 (the # \"License\"); you may not", "import get_profile from useradmin.views import ensure_home_directory, require_change_password LOG = logging.getLogger(__name__)", "get_current_users(): \"\"\"Return dictionary of User objects and a dictionary of", "backends: for backend in backends: if hasattr(backend, 'logout'): response =", "from_modal: request.session.set_test_cookie() renderable_path = 'login.mako' if from_modal: renderable_path = 'login_modal.mako'", "try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: # If secret_key changed,", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "request.session.delete_test_cookie() auto_create_home_backends = ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if is_first_login_ever or any(backend", "response = backend.logout(request, next_page) if response: return response return django.contrib.auth.views.logout(request,", "ownership. Cloudera, Inc. licenses this file # to you under", "ANY KIND, either express or implied. 
# See the License", "See the License for the specific language governing permissions and", "oauth = None import cgi import logging import urllib from", "return django.contrib.auth.views.logout(request, next_page) def profile(request): \"\"\" Dumps JSON for user-profile", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "secret_key changed, this resolution won't work. uid = None if", "file except in compliance # with the License. You may", "to in writing, software # distributed under the License is", "IP address and last access time\"\"\" current_users = { }", "content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://' + request.get_host() +", "# See the License for the specific language governing permissions", "next_page) if response: return response return django.contrib.auth.views.logout(request, next_page) def profile(request):", "import datetime from axes.decorators import watch_login import django.contrib.auth.views from django.core", "dt_login(request, from_modal=False): redirect_to = request.REQUEST.get('next', '/') is_first_login_ever = first_login_ever() backend_names", "for user: %s' % user.username request.audit['operationText'] = msg access_warn(request, msg)", "or agreed to in writing, software # distributed under the", "you may not use this file except in compliance #", "required by applicable law or agreed to in writing, software", "def oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret'])", "% request.POST.get('username') request.audit['operationText'] = msg access_warn(request, msg) if from_modal or", "work. 
uid = None if uid is not None: try:", "try: userobj = User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username, { }) except", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "last_access_map.get(userobj.username, { }) except User.DoesNotExist: LOG.debug(\"User with id=%d does not", "'true': return JsonResponse({'auth': False}) else: first_user_form = None auth_form =", "use this file except in compliance # with the License.", "return dict( username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), # datetime object needs", "if resp['status'] != '200': raise Exception(_(\"Invalid response from OAuth provider:", "or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': True}) else: return", "contributor license agreements. See the NOTICE file # distributed with", "changed, this resolution won't work. uid = None if uid", "user = authenticate(access_token=access_token) login(request, user) redirect_to = request.REQUEST.get('next', '/') return", "first_user_form and first_user_form.is_valid() if first_user or not is_first_login_ever: auth_form =", "%s' % user.username request.audit['operationText'] = msg access_warn(request, msg) if from_modal", "import JsonResponse from desktop.log.access import access_warn, last_access_map from desktop.conf import", "_profile_dict(user): return dict( username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), # datetime object", "= get_profile(user) login(request, user) if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends = ['AllowAllBackend',", "for backend in get_backends()] @login_notrequired @watch_login def dt_login(request, from_modal=False): redirect_to", "hadoop.fs.exceptions import WebHdfsException from useradmin.models import get_profile from useradmin.views import", "= False msg = 'Failed login for 
user: %s' %", "agreed to in writing, software # distributed under the License", "[backend.__class__.__name__ for backend in get_backends()] @login_notrequired @watch_login def dt_login(request, from_modal=False):", "or auth_form, 'next': redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors': request.method == 'POST',", "%s\") % resp) access_token = dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token) login(request,", "} for session in Session.objects.all(): try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except", "provider: %s\") % resp) request.session['request_token'] = dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\"", "exist\" % uid) return current_users def first_login_ever(): backends = get_backends()", "is_active_directory }) def dt_logout(request, next_page=None): \"\"\"Log out the user\"\"\" username", "token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer, token) resp, content", "in Session.objects.all(): try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: # If", "distributed under the License is distributed on an \"AS IS\"", "create home directory.'), exc_info=e) request.error(_('Could not create home directory.')) if", "'POST', 'backend_names': backend_names, 'active_directory': is_active_directory }) def dt_logout(request, next_page=None): \"\"\"Log", "{ }) except User.DoesNotExist: LOG.debug(\"User with id=%d does not exist\"", "with this work for additional information # regarding copyright ownership.", "except: oauth = None import cgi import logging import urllib", "= oauth.Client(consumer) resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://'", "first_user_form or auth_form, 'next': redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors': request.method 
==", "Exception(_(\"Invalid response from OAuth provider: %s\") % resp) access_token =", "User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username, { }) except User.DoesNotExist: LOG.debug(\"User with", "is not None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer)", "express or implied. # See the License for the specific", "% user.username request.audit['operationText'] = msg access_warn(request, msg) if from_modal or", "}) except User.DoesNotExist: LOG.debug(\"User with id=%d does not exist\" %", "import watch_login import django.contrib.auth.views from django.core import urlresolvers from django.core.exceptions", "email=user.email) # OAuth is based on Twitter as example. @login_notrequired", "from OAuth provider: %s\") % resp) access_token = dict(cgi.parse_qsl(content)) user", "from django.contrib.sessions.models import Session from django.http import HttpResponseRedirect from django.utils.translation", "user info! first_user_form = is_first_login_ever and UserCreationForm(data=request.POST) or None first_user", "desktop.auth import forms as auth_forms from desktop.lib.django_util import render from", "import WebHdfsException from useradmin.models import get_profile from useradmin.views import ensure_home_directory,", "raise Exception(_(\"Invalid response from OAuth provider: %s\") % resp) request.session['request_token']", "example. 
@login_notrequired def oauth_login(request): assert oauth is not None consumer", "= get_backends() if backends: for backend in backends: if hasattr(backend,", "writing, software # distributed under the License is distributed on", "return current_users def first_login_ever(): backends = get_backends() for backend in", "directory.'), exc_info=e) request.error(_('Could not create home directory.')) if require_change_password(userprofile): return", "consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer) resp, content =", "the License. You may obtain a copy of the License", "under the License. try: import oauth2 as oauth except: oauth", "import urlresolvers from django.core.exceptions import SuspiciousOperation from django.contrib.auth import login,", "AuthenticationForm() if DEMO_ENABLED.get() and not 'admin' in request.REQUEST: user =", "DEMO_ENABLED.get() and not 'admin' in request.REQUEST: user = authenticate(username=request.user.username, password='<PASSWORD>')", "\"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://' + request.get_host() + '/login/oauth_authenticated/' })) if", "OAUTH, DEMO_ENABLED from hadoop.fs.exceptions import WebHdfsException from useradmin.models import get_profile", "\"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request): consumer", "forms as auth_forms from desktop.lib.django_util import render from desktop.lib.django_util import", "in backends: if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever(): return True return", "auth_form.get_user() userprofile = get_profile(user) login(request, user) if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends", "DEMO_ENABLED from hadoop.fs.exceptions import WebHdfsException from useradmin.models import get_profile from", "under the 
Apache License, Version 2.0 (the # \"License\"); you", "as oauth except: oauth = None import cgi import logging", "if uid is not None: try: userobj = User.objects.get(pk=uid) current_users[userobj]", "AuthenticationForm. # It provides 'backends' on the User object. user", "request.user.get_username() request.audit = { 'username': username, 'operation': 'USER_LOGOUT', 'operationText': 'Logged", "CONDITIONS OF ANY KIND, either express or implied. # See", "require_change_password LOG = logging.getLogger(__name__) def get_current_users(): \"\"\"Return dictionary of User", "last access time\"\"\" current_users = { } for session in", "return False def get_backend_names(): return get_backends and [backend.__class__.__name__ for backend", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "need to validate user info! first_user_form = is_first_login_ever and UserCreationForm(data=request.POST)", "hasattr(backend, 'logout'): response = backend.logout(request, next_page) if response: return response", "resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://' + request.get_host()", "from axes.decorators import watch_login import django.contrib.auth.views from django.core import urlresolvers", "= 'LdapBackend' in backend_names and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) )", "kwargs={'username': user.username})) userprofile.first_login = False userprofile.last_activity = datetime.now() userprofile.save() msg", "django.contrib.auth import login, get_backends, authenticate from django.contrib.auth.models import User from", "body=urllib.urlencode({ 'oauth_callback': 'http://' + request.get_host() + '/login/oauth_authenticated/' })) if resp['status']", "for user: %s' % request.POST.get('username') request.audit['operationText'] = msg access_warn(request, msg)", "e: LOG.error(_('Could not create home directory.'), exc_info=e) request.error(_('Could not create", "get_backends, 
authenticate from django.contrib.auth.models import User from django.contrib.sessions.models import Session", "datetime.now() userprofile.save() msg = 'Successful login for user: %s' %", "validate user info! first_user_form = is_first_login_ever and UserCreationForm(data=request.POST) or None", "request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': False}) else: first_user_form =", "or more contributor license agreements. See the NOTICE file #", "first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), # datetime object needs to be converted", "'username': username, 'operation': 'USER_LOGOUT', 'operationText': 'Logged out user: %s' %", "'Failed login for user: %s' % request.POST.get('username') request.audit['operationText'] = msg", "first_login_ever() backend_names = get_backend_names() is_active_directory = 'LdapBackend' in backend_names and", "from useradmin.views import ensure_home_directory, require_change_password LOG = logging.getLogger(__name__) def get_current_users():", "dt_logout(request, next_page=None): \"\"\"Log out the user\"\"\" username = request.user.get_username() request.audit", "_ from desktop.auth import forms as auth_forms from desktop.lib.django_util import", "= datetime.now() userprofile.save() msg = 'Successful login for user: %s'", "auth_form.is_valid(): # Must login by using the AuthenticationForm. # It", "JsonResponse({'auth': True}) else: return HttpResponseRedirect(redirect_to) else: request.audit['allowed'] = False msg", "the NOTICE file # distributed with this work for additional", "'backends' on the User object. user = auth_form.get_user() userprofile =", "_profile_dict(request.user)) def _profile_dict(user): return dict( username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), #", "AuthenticationForm = auth_forms.AuthenticationForm if request.method == 'POST': request.audit = {", "login by using the AuthenticationForm. 
# It provides 'backends' on", "except (IOError, WebHdfsException), e: LOG.error(_('Could not create home directory.'), exc_info=e)", "OR CONDITIONS OF ANY KIND, either express or implied. #", "not None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer) resp,", "LDAP, OAUTH, DEMO_ENABLED from hadoop.fs.exceptions import WebHdfsException from useradmin.models import", "this resolution won't work. uid = None if uid is", "= get_backend_names() is_active_directory = 'LdapBackend' in backend_names and ( bool(LDAP.NT_DOMAIN.get())", "the License is distributed on an \"AS IS\" BASIS, #", "objects and a dictionary of the user's IP address and", "if backends: for backend in backends: if hasattr(backend, 'logout'): response", "return JsonResponse({'auth': False}) else: first_user_form = None auth_form = AuthenticationForm()", "user.username) except (IOError, WebHdfsException), e: LOG.error(_('Could not create home directory.'),", "render from desktop.lib.django_util import login_notrequired from desktop.lib.django_util import JsonResponse from", "# If secret_key changed, this resolution won't work. 
uid =", "is_first_login_ever or any(backend in backend_names for backend in auto_create_home_backends): #", "HttpResponseRedirect(redirect_to) else: request.audit['allowed'] = False msg = 'Failed login for", "False}) else: first_user_form = None auth_form = AuthenticationForm() if DEMO_ENABLED.get()", "session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: # If secret_key changed, this resolution won't", "'LdapBackend' in backend_names and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) ) if", "user.username) return HttpResponseRedirect(redirect_to) if not from_modal: request.session.set_test_cookie() renderable_path = 'login.mako'", "from useradmin.models import get_profile from useradmin.views import ensure_home_directory, require_change_password LOG", "backend in backends: if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever(): return True", "!= '200': raise Exception(_(\"Invalid response from OAuth provider: %s\") %", "out the user\"\"\" username = request.user.get_username() request.audit = { 'username':", "client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://' + request.get_host() + '/login/oauth_authenticated/' }))", "login for user: %s' % user.username request.audit['operationText'] = msg access_warn(request,", "from datetime import datetime from axes.decorators import watch_login import django.contrib.auth.views", "request.session['request_token'] = dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return", "'backend_names': backend_names, 'active_directory': is_active_directory }) def dt_logout(request, next_page=None): \"\"\"Log out", "be converted email=user.email) # OAuth is based on Twitter as", "'operation': 'USER_LOGOUT', 'operationText': 'Logged out user: %s' % username }", "\"\"\" Dumps JSON for user-profile information. 
\"\"\" return render(None, request,", "})) if resp['status'] != '200': raise Exception(_(\"Invalid response from OAuth", "'http://' + request.get_host() + '/login/oauth_authenticated/' })) if resp['status'] != '200':", "law or agreed to in writing, software # distributed under", "def get_backend_names(): return get_backends and [backend.__class__.__name__ for backend in get_backends()]", "directory for first user. try: ensure_home_directory(request.fs, user.username) except (IOError, WebHdfsException),", "get_profile(user) login(request, user) if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends = ['AllowAllBackend', 'LdapBackend',", "urlresolvers from django.core.exceptions import SuspiciousOperation from django.contrib.auth import login, get_backends,", "from django.core import urlresolvers from django.core.exceptions import SuspiciousOperation from django.contrib.auth", "oauth_login(request): assert oauth is not None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get())", "access_token = dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token) login(request, user) redirect_to =", "request.audit['operationText'] = msg access_warn(request, msg) if from_modal or request.REQUEST.get('fromModal', 'false')", "= backend.logout(request, next_page) if response: return response return django.contrib.auth.views.logout(request, next_page)", "# limitations under the License. try: import oauth2 as oauth", "= first_login_ever() backend_names = get_backend_names() is_active_directory = 'LdapBackend' in backend_names", "ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to) if not from_modal: request.session.set_test_cookie() renderable_path =", "def profile(request): \"\"\" Dumps JSON for user-profile information. 
\"\"\" return", "client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status'] != '200': raise Exception(_(\"Invalid response from", "create home directory.')) if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login", "# regarding copyright ownership. Cloudera, Inc. licenses this file #", "import django.contrib.auth.views from django.core import urlresolvers from django.core.exceptions import SuspiciousOperation", "home directory.')) if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login =", "is not None: try: userobj = User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username,", "request.error(_('Could not create home directory.')) if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username':", "SuspiciousOperation: # If secret_key changed, this resolution won't work. uid", "home directory for first user. 
try: ensure_home_directory(request.fs, user.username) except (IOError,", "False msg = 'Failed login for user: %s' % request.POST.get('username')", "= ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if is_first_login_ever or any(backend in backend_names", "may obtain a copy of the License at # #", "user.username request.audit['operationText'] = msg access_warn(request, msg) if from_modal or request.REQUEST.get('fromModal',", "auth_form, 'next': redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors': request.method == 'POST', 'backend_names':", "'operation': 'USER_LOGIN', 'username': request.POST.get('username') } # For first login, need", "username, 'operation': 'USER_LOGOUT', 'operationText': 'Logged out user: %s' % username", "request.REQUEST.get('next', '/') is_first_login_ever = first_login_ever() backend_names = get_backend_names() is_active_directory =", "user. try: ensure_home_directory(request.fs, user.username) except (IOError, WebHdfsException), e: LOG.error(_('Could not", "url = \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired def", "= auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm else: UserCreationForm = auth_forms.UserCreationForm AuthenticationForm", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "'/') is_first_login_ever = first_login_ever() backend_names = get_backend_names() is_active_directory = 'LdapBackend'", "on Twitter as example. 
@login_notrequired def oauth_login(request): assert oauth is", "raise Exception(_(\"Invalid response from OAuth provider: %s\") % resp) access_token", "request.POST.get('username') request.audit['operationText'] = msg access_warn(request, msg) if from_modal or request.REQUEST.get('fromModal',", "else: request.audit['allowed'] = False msg = 'Failed login for user:", "'logout'): response = backend.logout(request, next_page) if response: return response return", "dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm if request.method == 'POST': request.audit", "Session from django.http import HttpResponseRedirect from django.utils.translation import ugettext as", "AuthenticationForm = auth_forms.LdapAuthenticationForm else: UserCreationForm = auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm", "user) if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends = ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if", "User.DoesNotExist: LOG.debug(\"User with id=%d does not exist\" % uid) return", "for additional information # regarding copyright ownership. Cloudera, Inc. 
licenses", "{ 'username': username, 'operation': 'USER_LOGOUT', 'operationText': 'Logged out user: %s'", "resp) request.session['request_token'] = dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token'])", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "if first_user or not is_first_login_ever: auth_form = AuthenticationForm(data=request.POST) if auth_form.is_valid():", "JsonResponse from desktop.log.access import access_warn, last_access_map from desktop.conf import LDAP,", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "first_user_form = is_first_login_ever and UserCreationForm(data=request.POST) or None first_user = first_user_form", "user = authenticate(username=request.user.username, password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to)", "Inc. under one # or more contributor license agreements. See", "# Must login by using the AuthenticationForm. # It provides", "first_user_form.is_valid() if first_user or not is_first_login_ever: auth_form = AuthenticationForm(data=request.POST) if", "= None auth_form = AuthenticationForm() if DEMO_ENABLED.get() and not 'admin'", "def dt_login(request, from_modal=False): redirect_to = request.REQUEST.get('next', '/') is_first_login_ever = first_login_ever()", "first login, need to validate user info! first_user_form = is_first_login_ever", "= auth_form.get_user() userprofile = get_profile(user) login(request, user) if request.session.test_cookie_worked(): request.session.delete_test_cookie()", "in compliance # with the License. 
You may obtain a", "# to you under the Apache License, Version 2.0 (the", "redirect_to = request.REQUEST.get('next', '/') is_first_login_ever = first_login_ever() backend_names = get_backend_names()", "in backend_names for backend in auto_create_home_backends): # Create home directory", "JsonResponse({'auth': False}) else: first_user_form = None auth_form = AuthenticationForm() if", "renderable_path = 'login.mako' if from_modal: renderable_path = 'login_modal.mako' return render(renderable_path,", "hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever(): return True return False def get_backend_names():", "from desktop.lib.django_util import render from desktop.lib.django_util import login_notrequired from desktop.lib.django_util", "django.core.exceptions import SuspiciousOperation from django.contrib.auth import login, get_backends, authenticate from", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "dictionary of the user's IP address and last access time\"\"\"", "render(renderable_path, request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or auth_form, 'next':", "if from_modal: renderable_path = 'login_modal.mako' return render(renderable_path, request, { 'action':", "= 'Failed login for user: %s' % request.POST.get('username') request.audit['operationText'] =", "auth_form = AuthenticationForm(data=request.POST) if auth_form.is_valid(): # Must login by using", "Exception(_(\"Invalid response from OAuth provider: %s\") % resp) request.session['request_token'] =", "if auth_form.is_valid(): # Must login by using the AuthenticationForm. 
#", "'next': redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors': request.method == 'POST', 'backend_names': backend_names,", "this file # to you under the Apache License, Version", "'Successful login for user: %s' % user.username request.audit['operationText'] = msg", "home directory.'), exc_info=e) request.error(_('Could not create home directory.')) if require_change_password(userprofile):", "= request.REQUEST.get('next', '/') is_first_login_ever = first_login_ever() backend_names = get_backend_names() is_active_directory", "does not exist\" % uid) return current_users def first_login_ever(): backends", "% uid) return current_users def first_login_ever(): backends = get_backends() for", "response from OAuth provider: %s\") % resp) access_token = dict(cgi.parse_qsl(content))", "import ugettext as _ from desktop.auth import forms as auth_forms", ") if is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm else:", "needs to be converted email=user.email) # OAuth is based on", "for backend in auto_create_home_backends): # Create home directory for first", "{ 'operation': 'USER_LOGIN', 'username': request.POST.get('username') } # For first login,", "get_backends and [backend.__class__.__name__ for backend in get_backends()] @login_notrequired @watch_login def", "%s\") % resp) request.session['request_token'] = dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\" %", "return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login = False userprofile.last_activity = datetime.now()", "@login_notrequired @watch_login def dt_login(request, from_modal=False): redirect_to = request.REQUEST.get('next', '/') is_first_login_ever", "request.audit['allowed'] = False msg = 'Failed login for user: %s'", "dictionary of User objects and a dictionary of the user's", "= AuthenticationForm(data=request.POST) if 
auth_form.is_valid(): # Must login by using the", "auth_forms.AuthenticationForm if request.method == 'POST': request.audit = { 'operation': 'USER_LOGIN',", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "OAuth is based on Twitter as example. @login_notrequired def oauth_login(request):", "= \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request):", "Twitter as example. @login_notrequired def oauth_login(request): assert oauth is not", "oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer, token) resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(),", "renderable_path = 'login_modal.mako' return render(renderable_path, request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form':", "auth_forms.LdapAuthenticationForm else: UserCreationForm = auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm if request.method", "in auto_create_home_backends): # Create home directory for first user. try:", "WebHdfsException), e: LOG.error(_('Could not create home directory.'), exc_info=e) request.error(_('Could not", "'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or auth_form, 'next': redirect_to, 'first_login_ever': is_first_login_ever,", "backends: if hasattr(backend, 'logout'): response = backend.logout(request, next_page) if response:", "% username } backends = get_backends() if backends: for backend", "get_backends()] @login_notrequired @watch_login def dt_login(request, from_modal=False): redirect_to = request.REQUEST.get('next', '/')", "from django.contrib.auth.models import User from django.contrib.sessions.models import Session from django.http", "or implied. 
# See the License for the specific language", "for backend in backends: if hasattr(backend, 'logout'): response = backend.logout(request,", "require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login = False userprofile.last_activity =", "else: UserCreationForm = auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm if request.method ==", "information # regarding copyright ownership. Cloudera, Inc. licenses this file", "request.method == 'POST', 'backend_names': backend_names, 'active_directory': is_active_directory }) def dt_logout(request,", "the user's IP address and last access time\"\"\" current_users =", "and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory: UserCreationForm =", "# \"License\"); you may not use this file except in", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "import Session from django.http import HttpResponseRedirect from django.utils.translation import ugettext", "current_users = { } for session in Session.objects.all(): try: uid", "UserCreationForm = auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm if request.method == 'POST':", "= 'Successful login for user: %s' % user.username request.audit['operationText'] =", "\"License\"); you may not use this file except in compliance", "from_modal or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': True}) else:", "python # Licensed to Cloudera, Inc. 
under one # or", "oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer, token)", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "user) ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to) if not from_modal: request.session.set_test_cookie() renderable_path", "None first_user = first_user_form and first_user_form.is_valid() if first_user or not", "django.http import HttpResponseRedirect from django.utils.translation import ugettext as _ from", "msg = 'Successful login for user: %s' % user.username request.audit['operationText']", "'form': first_user_form or auth_form, 'next': redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors': request.method", "# distributed with this work for additional information # regarding", "auth_forms from desktop.lib.django_util import render from desktop.lib.django_util import login_notrequired from", "ensure_home_directory(request.fs, user.username) except (IOError, WebHdfsException), e: LOG.error(_('Could not create home", "def first_login_ever(): backends = get_backends() for backend in backends: if", "converted email=user.email) # OAuth is based on Twitter as example.", "@watch_login def dt_login(request, from_modal=False): redirect_to = request.REQUEST.get('next', '/') is_first_login_ever =", "first user. try: ensure_home_directory(request.fs, user.username) except (IOError, WebHdfsException), e: LOG.error(_('Could", "permissions and # limitations under the License. 
try: import oauth2", "and UserCreationForm(data=request.POST) or None first_user = first_user_form and first_user_form.is_valid() if", "user: %s' % user.username request.audit['operationText'] = msg access_warn(request, msg) if", "= oauth.Client(consumer, token) resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status']", "%s' % username } backends = get_backends() if backends: for", "= is_first_login_ever and UserCreationForm(data=request.POST) or None first_user = first_user_form and", "= { 'username': username, 'operation': 'USER_LOGOUT', 'operationText': 'Logged out user:", "request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or auth_form, 'next': redirect_to,", "License. try: import oauth2 as oauth except: oauth = None", "Inc. licenses this file # to you under the Apache", "msg access_warn(request, msg) if from_modal or request.REQUEST.get('fromModal', 'false') == 'true':", "import LDAP, OAUTH, DEMO_ENABLED from hadoop.fs.exceptions import WebHdfsException from useradmin.models", "request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer, token) resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\")", "def _profile_dict(user): return dict( username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), # datetime", "in backend_names and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory:", "return JsonResponse({'auth': True}) else: return HttpResponseRedirect(redirect_to) else: request.audit['allowed'] = False", "is_first_login_ever and UserCreationForm(data=request.POST) or None first_user = first_user_form and first_user_form.is_valid()", "= 'login_modal.mako' return render(renderable_path, request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form", "= auth_forms.AuthenticationForm if request.method == 'POST': 
request.audit = { 'operation':", "desktop.log.access import access_warn, last_access_map from desktop.conf import LDAP, OAUTH, DEMO_ENABLED", "request.get_host() + '/login/oauth_authenticated/' })) if resp['status'] != '200': raise Exception(_(\"Invalid", "# # Unless required by applicable law or agreed to", "Version 2.0 (the # \"License\"); you may not use this", "except User.DoesNotExist: LOG.debug(\"User with id=%d does not exist\" % uid)", "= request.user.get_username() request.audit = { 'username': username, 'operation': 'USER_LOGOUT', 'operationText':", "'Logged out user: %s' % username } backends = get_backends()", "client = oauth.Client(consumer) resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback':", "backends: if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever(): return True return False", "one # or more contributor license agreements. See the NOTICE", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Session.objects.all(): try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: # If secret_key", "% resp) request.session['request_token'] = dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(),", "SuspiciousOperation from django.contrib.auth import login, get_backends, authenticate from django.contrib.auth.models import", "or not is_first_login_ever: auth_form = AuthenticationForm(data=request.POST) if auth_form.is_valid(): # Must", "last_access_map from desktop.conf import LDAP, OAUTH, DEMO_ENABLED from hadoop.fs.exceptions import", "directory.')) if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login = False", "login(request, user) if request.session.test_cookie_worked(): request.session.delete_test_cookie() auto_create_home_backends = 
['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend']", "return render(renderable_path, request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or auth_form,", "% resp) access_token = dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token) login(request, user)", "OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client = oauth.Client(consumer, token) resp,", "except in compliance # with the License. You may obtain", "= None if uid is not None: try: userobj =", "import cgi import logging import urllib from datetime import datetime", "and first_user_form.is_valid() if first_user or not is_first_login_ever: auth_form = AuthenticationForm(data=request.POST)", "\"\"\"Return dictionary of User objects and a dictionary of the", "return get_backends and [backend.__class__.__name__ for backend in get_backends()] @login_notrequired @watch_login", "from django.http import HttpResponseRedirect from django.utils.translation import ugettext as _", "= client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status'] != '200': raise Exception(_(\"Invalid response", "implied. # See the License for the specific language governing", "oauth.Client(consumer, token) resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status'] !=", "NOTICE file # distributed with this work for additional information", "limitations under the License. try: import oauth2 as oauth except:", "False def get_backend_names(): return get_backends and [backend.__class__.__name__ for backend in", "For first login, need to validate user info! first_user_form =", "Dumps JSON for user-profile information. 
\"\"\" return render(None, request, _profile_dict(request.user))", "'POST': request.audit = { 'operation': 'USER_LOGIN', 'username': request.POST.get('username') } #", "this file except in compliance # with the License. You", "if from_modal or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': False})", "return HttpResponseRedirect(redirect_to) if not from_modal: request.session.set_test_cookie() renderable_path = 'login.mako' if", "from_modal: renderable_path = 'login_modal.mako' return render(renderable_path, request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'),", "id=%d does not exist\" % uid) return current_users def first_login_ever():", "return HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token", "UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm else: UserCreationForm = auth_forms.UserCreationForm", "from desktop.lib.django_util import JsonResponse from desktop.log.access import access_warn, last_access_map from", "not create home directory.')) if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username}))", "not 'admin' in request.REQUEST: user = authenticate(username=request.user.username, password='<PASSWORD>') login(request, user)", "\"\"\" return render(None, request, _profile_dict(request.user)) def _profile_dict(user): return dict( username=user.username,", "datetime object needs to be converted email=user.email) # OAuth is", "resp['status'] != '200': raise Exception(_(\"Invalid response from OAuth provider: %s\")", "as example. @login_notrequired def oauth_login(request): assert oauth is not None", "license agreements. 
See the NOTICE file # distributed with this", "user's IP address and last access time\"\"\" current_users = {", "by applicable law or agreed to in writing, software #", "resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status'] != '200': raise", "= User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username, { }) except User.DoesNotExist: LOG.debug(\"User", "first_user or not is_first_login_ever: auth_form = AuthenticationForm(data=request.POST) if auth_form.is_valid(): #", "useradmin.views import ensure_home_directory, require_change_password LOG = logging.getLogger(__name__) def get_current_users(): \"\"\"Return", "content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status'] != '200': raise Exception(_(\"Invalid", "True}) else: return HttpResponseRedirect(redirect_to) else: request.audit['allowed'] = False msg =", "#!/usr/bin/env python # Licensed to Cloudera, Inc. under one #", "and backend.is_first_login_ever(): return True return False def get_backend_names(): return get_backends", "to be converted email=user.email) # OAuth is based on Twitter", "if hasattr(backend, 'logout'): response = backend.logout(request, next_page) if response: return", "User from django.contrib.sessions.models import Session from django.http import HttpResponseRedirect from", "backends = get_backends() if backends: for backend in backends: if", "oauth except: oauth = None import cgi import logging import", "HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login = False userprofile.last_activity = datetime.now() userprofile.save()", "provides 'backends' on the User object. 
user = auth_form.get_user() userprofile", "'login_modal.mako' return render(renderable_path, request, { 'action': urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or", "request.session.set_test_cookie() renderable_path = 'login.mako' if from_modal: renderable_path = 'login_modal.mako' return", "from desktop.lib.django_util import login_notrequired from desktop.lib.django_util import JsonResponse from desktop.log.access", "and [backend.__class__.__name__ for backend in get_backends()] @login_notrequired @watch_login def dt_login(request,", "OAuth provider: %s\") % resp) access_token = dict(cgi.parse_qsl(content)) user =", "access_warn, last_access_map from desktop.conf import LDAP, OAUTH, DEMO_ENABLED from hadoop.fs.exceptions", "not use this file except in compliance # with the", "request.audit = { 'username': username, 'operation': 'USER_LOGOUT', 'operationText': 'Logged out", "user = auth_form.get_user() userprofile = get_profile(user) login(request, user) if request.session.test_cookie_worked():", "= logging.getLogger(__name__) def get_current_users(): \"\"\"Return dictionary of User objects and", "logging.getLogger(__name__) def get_current_users(): \"\"\"Return dictionary of User objects and a", "'SpnegoDjangoBackend'] if is_first_login_ever or any(backend in backend_names for backend in", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "WebHdfsException from useradmin.models import get_profile from useradmin.views import ensure_home_directory, require_change_password", "for first user. 
try: ensure_home_directory(request.fs, user.username) except (IOError, WebHdfsException), e:", "return True return False def get_backend_names(): return get_backends and [backend.__class__.__name__", "Unless required by applicable law or agreed to in writing,", "django.contrib.sessions.models import Session from django.http import HttpResponseRedirect from django.utils.translation import", "auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm if request.method == 'POST': request.audit =", "auth_form = AuthenticationForm() if DEMO_ENABLED.get() and not 'admin' in request.REQUEST:", "Cloudera, Inc. licenses this file # to you under the", "HttpResponseRedirect(redirect_to) if not from_modal: request.session.set_test_cookie() renderable_path = 'login.mako' if from_modal:", "None if uid is not None: try: userobj = User.objects.get(pk=uid)", "the specific language governing permissions and # limitations under the", "Must login by using the AuthenticationForm. # It provides 'backends'", "# or more contributor license agreements. See the NOTICE file", "time\"\"\" current_users = { } for session in Session.objects.all(): try:", "if response: return response return django.contrib.auth.views.logout(request, next_page) def profile(request): \"\"\"", "and # limitations under the License. 
try: import oauth2 as", "= msg access_warn(request, msg) if from_modal or request.REQUEST.get('fromModal', 'false') ==", "% (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request): consumer =", "applicable law or agreed to in writing, software # distributed", "in backends: if hasattr(backend, 'logout'): response = backend.logout(request, next_page) if", "userprofile.first_login = False userprofile.last_activity = datetime.now() userprofile.save() msg = 'Successful", "'username': request.POST.get('username') } # For first login, need to validate", "not exist\" % uid) return current_users def first_login_ever(): backends =", "None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client = oauth.Client(consumer) resp, content", "datetime import datetime from axes.decorators import watch_login import django.contrib.auth.views from", "next_page) def profile(request): \"\"\" Dumps JSON for user-profile information. 
\"\"\"", "uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: # If secret_key changed, this", "== 'POST': request.audit = { 'operation': 'USER_LOGIN', 'username': request.POST.get('username') }", "= dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token) login(request, user) redirect_to = request.REQUEST.get('next',", "in writing, software # distributed under the License is distributed", "import SuspiciousOperation from django.contrib.auth import login, get_backends, authenticate from django.contrib.auth.models", "['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if is_first_login_ever or any(backend in backend_names for", "(the # \"License\"); you may not use this file except", "= dict(cgi.parse_qsl(content)) url = \"%s?oauth_token=%s\" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url)", "HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token =", "desktop.lib.django_util import login_notrequired from desktop.lib.django_util import JsonResponse from desktop.log.access import", "backend in backends: if hasattr(backend, 'logout'): response = backend.logout(request, next_page)", "} backends = get_backends() if backends: for backend in backends:", "= client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://' + request.get_host() + '/login/oauth_authenticated/'", "won't work. 
uid = None if uid is not None:", "from desktop.log.access import access_warn, last_access_map from desktop.conf import LDAP, OAUTH,", "= { } for session in Session.objects.all(): try: uid =", "def dt_logout(request, next_page=None): \"\"\"Log out the user\"\"\" username = request.user.get_username()", "as _ from desktop.auth import forms as auth_forms from desktop.lib.django_util", "username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), # datetime object needs to be", "resolution won't work. uid = None if uid is not", "is_first_login_ever: auth_form = AuthenticationForm(data=request.POST) if auth_form.is_valid(): # Must login by", "msg) if from_modal or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth':", "for session in Session.objects.all(): try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation:", "next_page=None): \"\"\"Log out the user\"\"\" username = request.user.get_username() request.audit =", "LOG = logging.getLogger(__name__) def get_current_users(): \"\"\"Return dictionary of User objects", "uid = None if uid is not None: try: userobj", "JSON for user-profile information. 
\"\"\" return render(None, request, _profile_dict(request.user)) def", "render(None, request, _profile_dict(request.user)) def _profile_dict(user): return dict( username=user.username, first_name=user.first_name, last_name=user.last_name,", "(OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(),", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "not None: try: userobj = User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username, {", "} # For first login, need to validate user info!", "oauth2 as oauth except: oauth = None import cgi import", "oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret']) client", "import access_warn, last_access_map from desktop.conf import LDAP, OAUTH, DEMO_ENABLED from", "None: try: userobj = User.objects.get(pk=uid) current_users[userobj] = last_access_map.get(userobj.username, { })", "backend_names and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory: UserCreationForm", "user: %s' % username } backends = get_backends() if backends:", "LOG.error(_('Could not create home directory.'), exc_info=e) request.error(_('Could not create home", "get_backend_names(): return get_backends and [backend.__class__.__name__ for backend in get_backends()] @login_notrequired", "info! 
first_user_form = is_first_login_ever and UserCreationForm(data=request.POST) or None first_user =", "False userprofile.last_activity = datetime.now() userprofile.save() msg = 'Successful login for", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "return HttpResponseRedirect(redirect_to) else: request.audit['allowed'] = False msg = 'Failed login", "'active_directory': is_active_directory }) def dt_logout(request, next_page=None): \"\"\"Log out the user\"\"\"", "with the License. You may obtain a copy of the", "'USER_LOGIN', 'username': request.POST.get('username') } # For first login, need to", "login, need to validate user info! first_user_form = is_first_login_ever and", "login_notrequired from desktop.lib.django_util import JsonResponse from desktop.log.access import access_warn, last_access_map", "import render from desktop.lib.django_util import login_notrequired from desktop.lib.django_util import JsonResponse", "from_modal or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': False}) else:", "first_user_form = None auth_form = AuthenticationForm() if DEMO_ENABLED.get() and not", "AuthenticationForm(data=request.POST) if auth_form.is_valid(): # Must login by using the AuthenticationForm.", "except SuspiciousOperation: # If secret_key changed, this resolution won't work.", "urlresolvers.reverse('desktop.auth.views.dt_login'), 'form': first_user_form or auth_form, 'next': redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors':", "token) resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), \"GET\") if resp['status'] != '200':", "dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token) login(request, user) redirect_to = request.REQUEST.get('next', '/')", "django.contrib.auth.models import User from django.contrib.sessions.models import Session from django.http import", "auto_create_home_backends = ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend'] if 
is_first_login_ever or any(backend in", "the License for the specific language governing permissions and #", "'login_errors': request.method == 'POST', 'backend_names': backend_names, 'active_directory': is_active_directory }) def", "out user: %s' % username } backends = get_backends() if", "dict( username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login), # datetime object needs to", "'is_first_login_ever') and backend.is_first_login_ever(): return True return False def get_backend_names(): return", "msg = 'Failed login for user: %s' % request.POST.get('username') request.audit['operationText']", "by using the AuthenticationForm. # It provides 'backends' on the", "file # to you under the Apache License, Version 2.0", "either express or implied. # See the License for the", "login for user: %s' % request.POST.get('username') request.audit['operationText'] = msg access_warn(request,", "# with the License. You may obtain a copy of", "redirect_to, 'first_login_ever': is_first_login_ever, 'login_errors': request.method == 'POST', 'backend_names': backend_names, 'active_directory':", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "'oauth_callback': 'http://' + request.get_host() + '/login/oauth_authenticated/' })) if resp['status'] !=", "the User object. user = auth_form.get_user() userprofile = get_profile(user) login(request,", "import oauth2 as oauth except: oauth = None import cgi", "assert oauth is not None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client", "from_modal=False): redirect_to = request.REQUEST.get('next', '/') is_first_login_ever = first_login_ever() backend_names =", "in request.REQUEST: user = authenticate(username=request.user.username, password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs, user.username)", "If secret_key changed, this resolution won't work. 
uid = None", "logging import urllib from datetime import datetime from axes.decorators import", "copyright ownership. Cloudera, Inc. licenses this file # to you", "under one # or more contributor license agreements. See the", "username = request.user.get_username() request.audit = { 'username': username, 'operation': 'USER_LOGOUT',", "or any(backend in backend_names for backend in auto_create_home_backends): # Create", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "Cloudera, Inc. under one # or more contributor license agreements.", "import login, get_backends, authenticate from django.contrib.auth.models import User from django.contrib.sessions.models", "request.session['request_token']['oauth_token']) return HttpResponseRedirect(url) @login_notrequired def oauth_authenticated(request): consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get())", "axes.decorators import watch_login import django.contrib.auth.views from django.core import urlresolvers from", "if require_change_password(userprofile): return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username})) userprofile.first_login = False userprofile.last_activity", "for backend in backends: if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever(): return", "None import cgi import logging import urllib from datetime import", "# Create home directory for first user. try: ensure_home_directory(request.fs, user.username)", "to Cloudera, Inc. 
under one # or more contributor license", "the Apache License, Version 2.0 (the # \"License\"); you may", "= first_user_form and first_user_form.is_valid() if first_user or not is_first_login_ever: auth_form", "user.username})) userprofile.first_login = False userprofile.last_activity = datetime.now() userprofile.save() msg =", "is_first_login_ever, 'login_errors': request.method == 'POST', 'backend_names': backend_names, 'active_directory': is_active_directory })", "def oauth_login(request): assert oauth is not None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(),", "oauth is not None consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get()) client =", "request, _profile_dict(request.user)) def _profile_dict(user): return dict( username=user.username, first_name=user.first_name, last_name=user.last_name, last_login=str(user.last_login),", "desktop.lib.django_util import JsonResponse from desktop.log.access import access_warn, last_access_map from desktop.conf", "authenticate(username=request.user.username, password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs, user.username) return HttpResponseRedirect(redirect_to) if not", "import urllib from datetime import datetime from axes.decorators import watch_login", "you under the Apache License, Version 2.0 (the # \"License\");", "from desktop.auth import forms as auth_forms from desktop.lib.django_util import render", "'admin' in request.REQUEST: user = authenticate(username=request.user.username, password='<PASSWORD>') login(request, user) ensure_home_directory(request.fs,", "if request.method == 'POST': request.audit = { 'operation': 'USER_LOGIN', 'username':", "session in Session.objects.all(): try: uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY) except SuspiciousOperation: #", "language governing permissions and # limitations under the License. 
try:", "return render(None, request, _profile_dict(request.user)) def _profile_dict(user): return dict( username=user.username, first_name=user.first_name,", "# It provides 'backends' on the User object. user =", "'LdapBackend', 'SpnegoDjangoBackend'] if is_first_login_ever or any(backend in backend_names for backend", "and not 'admin' in request.REQUEST: user = authenticate(username=request.user.username, password='<PASSWORD>') login(request,", "agreements. See the NOTICE file # distributed with this work", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "response return django.contrib.auth.views.logout(request, next_page) def profile(request): \"\"\" Dumps JSON for", "licenses this file # to you under the Apache License,", "= False userprofile.last_activity = datetime.now() userprofile.save() msg = 'Successful login", "get_backend_names() is_active_directory = 'LdapBackend' in backend_names and ( bool(LDAP.NT_DOMAIN.get()) or", "return response return django.contrib.auth.views.logout(request, next_page) def profile(request): \"\"\" Dumps JSON", "backend in get_backends()] @login_notrequired @watch_login def dt_login(request, from_modal=False): redirect_to =", "# distributed under the License is distributed on an \"AS", "'false') == 'true': return JsonResponse({'auth': True}) else: return HttpResponseRedirect(redirect_to) else:", "# OAuth is based on Twitter as example. @login_notrequired def", "work for additional information # regarding copyright ownership. 
Cloudera, Inc.", "# Unless required by applicable law or agreed to in", "uid is not None: try: userobj = User.objects.get(pk=uid) current_users[userobj] =", "= last_access_map.get(userobj.username, { }) except User.DoesNotExist: LOG.debug(\"User with id=%d does", "= { 'operation': 'USER_LOGIN', 'username': request.POST.get('username') } # For first", "get_profile from useradmin.views import ensure_home_directory, require_change_password LOG = logging.getLogger(__name__) def", "bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm", "backends = get_backends() for backend in backends: if hasattr(backend, 'is_first_login_ever')", "'false') == 'true': return JsonResponse({'auth': False}) else: first_user_form = None", "}) def dt_logout(request, next_page=None): \"\"\"Log out the user\"\"\" username =", "access time\"\"\" current_users = { } for session in Session.objects.all():", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "provider: %s\") % resp) access_token = dict(cgi.parse_qsl(content)) user = authenticate(access_token=access_token)", "or request.REQUEST.get('fromModal', 'false') == 'true': return JsonResponse({'auth': False}) else: first_user_form", "= authenticate(access_token=access_token) login(request, user) redirect_to = request.REQUEST.get('next', '/') return HttpResponseRedirect(redirect_to)", "oauth.Client(consumer) resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), \"POST\", body=urllib.urlencode({ 'oauth_callback': 'http://' +", "last_login=str(user.last_login), # datetime object needs to be converted email=user.email) #", "It provides 'backends' on the User object. user = auth_form.get_user()", "License. 
You may obtain a copy of the License at", "User objects and a dictionary of the user's IP address", "request.method == 'POST': request.audit = { 'operation': 'USER_LOGIN', 'username': request.POST.get('username')", "any(backend in backend_names for backend in auto_create_home_backends): # Create home", "You may obtain a copy of the License at #", "django.contrib.auth.views from django.core import urlresolvers from django.core.exceptions import SuspiciousOperation from", "User object. user = auth_form.get_user() userprofile = get_profile(user) login(request, user)", "import HttpResponseRedirect from django.utils.translation import ugettext as _ from desktop.auth", "as auth_forms from desktop.lib.django_util import render from desktop.lib.django_util import login_notrequired", "(IOError, WebHdfsException), e: LOG.error(_('Could not create home directory.'), exc_info=e) request.error(_('Could", "( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) ) if is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm", "username } backends = get_backends() if backends: for backend in", "== 'true': return JsonResponse({'auth': True}) else: return HttpResponseRedirect(redirect_to) else: request.audit['allowed']", "'operationText': 'Logged out user: %s' % username } backends =", "compliance # with the License. 
You may obtain a copy", "= auth_forms.LdapAuthenticationForm else: UserCreationForm = auth_forms.UserCreationForm AuthenticationForm = auth_forms.AuthenticationForm if", "in get_backends()] @login_notrequired @watch_login def dt_login(request, from_modal=False): redirect_to = request.REQUEST.get('next',", "if is_active_directory: UserCreationForm = auth_forms.LdapUserCreationForm AuthenticationForm = auth_forms.LdapAuthenticationForm else: UserCreationForm", "= None import cgi import logging import urllib from datetime", "= get_backends() for backend in backends: if hasattr(backend, 'is_first_login_ever') and", "django.utils.translation import ugettext as _ from desktop.auth import forms as" ]
[ "= name def get_follow_list(self): return self.follow_list def set_follow_list(self, follow_list): self.follow_list", "== other.get_follow_list() \\ and self.intention == other.get_intention() and self.lane ==", "get_name(self): return self.name def set_name(self, name): self.name = name def", "= intention def get_lane(self): return self.lane def set_lane(self, lane): self.lane", "follow_list def get_intention(self): return self.intention def set_intention(self, intention): self.intention =", "intention, lane): self.name = name self.follow_list = follow_list self.intention =", "def set_intention(self, intention): self.intention = intention def get_lane(self): return self.lane", "self.intention def set_intention(self, intention): self.intention = intention def get_lane(self): return", "self.follow_list = follow_list self.intention = intention self.lane = lane def", "get_follow_list(self): return self.follow_list def set_follow_list(self, follow_list): self.follow_list = follow_list def", "intention): self.intention = intention def get_lane(self): return self.lane def set_lane(self,", "__eq__(self, other): if isinstance(other, Node): if self.name == other.get_name() and", "True return False def get_name(self): return self.name def set_name(self, name):", "return False def get_name(self): return self.name def set_name(self, name): self.name", "__init__(self, name, follow_list, intention, lane): self.name = name self.follow_list =", "Node(object): def __init__(self, name, follow_list, intention, lane): self.name = name", "lane def __eq__(self, other): if isinstance(other, Node): if self.name ==", "== other.get_lane(): return True return False def get_name(self): return self.name", "return self.name def set_name(self, name): self.name = name def get_follow_list(self):", "self.intention = intention def get_lane(self): return self.lane def set_lane(self, lane):", "== other.get_intention() and self.lane == other.get_lane(): return True return False", "and 
self.follow_list == other.get_follow_list() \\ and self.intention == other.get_intention() and", "intention self.lane = lane def __eq__(self, other): if isinstance(other, Node):", "self.follow_list == other.get_follow_list() \\ and self.intention == other.get_intention() and self.lane", "class Node(object): def __init__(self, name, follow_list, intention, lane): self.name =", "lane): self.name = name self.follow_list = follow_list self.intention = intention", "def get_follow_list(self): return self.follow_list def set_follow_list(self, follow_list): self.follow_list = follow_list", "self.name = name def get_follow_list(self): return self.follow_list def set_follow_list(self, follow_list):", "= follow_list def get_intention(self): return self.intention def set_intention(self, intention): self.intention", "and self.lane == other.get_lane(): return True return False def get_name(self):", "return True return False def get_name(self): return self.name def set_name(self,", "self.lane == other.get_lane(): return True return False def get_name(self): return", "follow_list, intention, lane): self.name = name self.follow_list = follow_list self.intention", "= lane def __eq__(self, other): if isinstance(other, Node): if self.name", "== other.get_name() and self.follow_list == other.get_follow_list() \\ and self.intention ==", "set_follow_list(self, follow_list): self.follow_list = follow_list def get_intention(self): return self.intention def", "def get_lane(self): return self.lane def set_lane(self, lane): self.lane = lane", "self.follow_list def set_follow_list(self, follow_list): self.follow_list = follow_list def get_intention(self): return", "self.name == other.get_name() and self.follow_list == other.get_follow_list() \\ and self.intention", "def set_follow_list(self, follow_list): self.follow_list = follow_list def get_intention(self): return self.intention", "self.intention == other.get_intention() and self.lane == other.get_lane(): return True return", "def 
__eq__(self, other): if isinstance(other, Node): if self.name == other.get_name()", "name self.follow_list = follow_list self.intention = intention self.lane = lane", "self.name = name self.follow_list = follow_list self.intention = intention self.lane", "other.get_follow_list() \\ and self.intention == other.get_intention() and self.lane == other.get_lane():", "def set_name(self, name): self.name = name def get_follow_list(self): return self.follow_list", "set_name(self, name): self.name = name def get_follow_list(self): return self.follow_list def", "other): if isinstance(other, Node): if self.name == other.get_name() and self.follow_list", "def __init__(self, name, follow_list, intention, lane): self.name = name self.follow_list", "and self.intention == other.get_intention() and self.lane == other.get_lane(): return True", "intention def get_lane(self): return self.lane def set_lane(self, lane): self.lane =", "get_intention(self): return self.intention def set_intention(self, intention): self.intention = intention def", "self.name def set_name(self, name): self.name = name def get_follow_list(self): return", "return self.intention def set_intention(self, intention): self.intention = intention def get_lane(self):", "if isinstance(other, Node): if self.name == other.get_name() and self.follow_list ==", "False def get_name(self): return self.name def set_name(self, name): self.name =", "isinstance(other, Node): if self.name == other.get_name() and self.follow_list == other.get_follow_list()", "return self.follow_list def set_follow_list(self, follow_list): self.follow_list = follow_list def get_intention(self):", "def get_intention(self): return self.intention def set_intention(self, intention): self.intention = intention", "self.follow_list = follow_list def get_intention(self): return self.intention def set_intention(self, intention):", "= name self.follow_list = follow_list self.intention = intention self.lane =", "other.get_intention() and self.lane == 
other.get_lane(): return True return False def", "self.lane = lane def __eq__(self, other): if isinstance(other, Node): if", "Node): if self.name == other.get_name() and self.follow_list == other.get_follow_list() \\", "def get_name(self): return self.name def set_name(self, name): self.name = name", "if self.name == other.get_name() and self.follow_list == other.get_follow_list() \\ and", "follow_list self.intention = intention self.lane = lane def __eq__(self, other):", "= intention self.lane = lane def __eq__(self, other): if isinstance(other,", "set_intention(self, intention): self.intention = intention def get_lane(self): return self.lane def", "other.get_name() and self.follow_list == other.get_follow_list() \\ and self.intention == other.get_intention()", "\\ and self.intention == other.get_intention() and self.lane == other.get_lane(): return", "self.intention = intention self.lane = lane def __eq__(self, other): if", "name, follow_list, intention, lane): self.name = name self.follow_list = follow_list", "name): self.name = name def get_follow_list(self): return self.follow_list def set_follow_list(self,", "follow_list): self.follow_list = follow_list def get_intention(self): return self.intention def set_intention(self,", "other.get_lane(): return True return False def get_name(self): return self.name def", "= follow_list self.intention = intention self.lane = lane def __eq__(self,", "name def get_follow_list(self): return self.follow_list def set_follow_list(self, follow_list): self.follow_list =" ]
[ "re.IGNORECASE) def get_url(url, headers=0, gid=0, sheet=None): parts = parse.urlparse(url) if", "headers = int(qs['headers'][-1]) if 'gid' in qs: gid = qs['gid'][-1]", "path = parts.path path = '/'.join((path.rstrip('/'), 'gviz/tq')) qs = parse.parse_qs(parts.query)", "from __future__ import division from __future__ import print_function from __future__", "args['gid'] = gid params = parse.urlencode(args) return parse.urlunparse( (parts.scheme, parts.netloc,", "absolute_import from __future__ import division from __future__ import print_function from", "# fallback to regex to extract from match = FROM_REGEX.search(sql)", "<reponame>tim-werner/gsheets-db-api<filename>gsheetsdb/url.py from __future__ import absolute_import from __future__ import division from", "in qs: headers = int(qs['headers'][-1]) if 'gid' in qs: gid", "if parts.fragment.startswith('gid='): gid = parts.fragment[len('gid='):] args = OrderedDict() if headers", "qs: sheet = qs['sheet'][-1] if parts.fragment.startswith('gid='): gid = parts.fragment[len('gid='):] args", "gid params = parse.urlencode(args) return parse.urlunparse( (parts.scheme, parts.netloc, path, None,", "0: args['headers'] = headers if sheet is not None: args['sheet']", "sheet else: args['gid'] = gid params = parse.urlencode(args) return parse.urlunparse(", "parts.path[:-len('/edit')] else: path = parts.path path = '/'.join((path.rstrip('/'), 'gviz/tq')) qs", "= parts.path[:-len('/edit')] else: path = parts.path path = '/'.join((path.rstrip('/'), 'gviz/tq'))", "= qs['gid'][-1] if 'sheet' in qs: sheet = qs['sheet'][-1] if", "in qs: sheet = qs['sheet'][-1] if parts.fragment.startswith('gid='): gid = parts.fragment[len('gid='):]", "parts = parse.urlparse(url) if parts.path.endswith('/edit'): path = parts.path[:-len('/edit')] else: path", "gid=0, sheet=None): parts = parse.urlparse(url) if parts.path.endswith('/edit'): path = parts.path[:-len('/edit')]", "extract_url(sql): try: return parse_sql(sql)['from'] except 
pyparsing.ParseException: # fallback to regex", "= '/'.join((path.rstrip('/'), 'gviz/tq')) qs = parse.parse_qs(parts.query) if 'headers' in qs:", "args = OrderedDict() if headers > 0: args['headers'] = headers", "qs['sheet'][-1] if parts.fragment.startswith('gid='): gid = parts.fragment[len('gid='):] args = OrderedDict() if", "qs = parse.parse_qs(parts.query) if 'headers' in qs: headers = int(qs['headers'][-1])", "qs: gid = qs['gid'][-1] if 'sheet' in qs: sheet =", "= qs['sheet'][-1] if parts.fragment.startswith('gid='): gid = parts.fragment[len('gid='):] args = OrderedDict()", "args['headers'] = headers if sheet is not None: args['sheet'] =", "(parts.scheme, parts.netloc, path, None, params, None)) def extract_url(sql): try: return", "path = '/'.join((path.rstrip('/'), 'gviz/tq')) qs = parse.parse_qs(parts.query) if 'headers' in", "parse.urlparse(url) if parts.path.endswith('/edit'): path = parts.path[:-len('/edit')] else: path = parts.path", "import re from six.moves.urllib import parse FROM_REGEX = re.compile(' from", "'gid' in qs: gid = qs['gid'][-1] if 'sheet' in qs:", "'headers' in qs: headers = int(qs['headers'][-1]) if 'gid' in qs:", "get_url(url, headers=0, gid=0, sheet=None): parts = parse.urlparse(url) if parts.path.endswith('/edit'): path", "qs: headers = int(qs['headers'][-1]) if 'gid' in qs: gid =", "six.moves.urllib import parse FROM_REGEX = re.compile(' from (\"http.*?\")', re.IGNORECASE) def", "= gid params = parse.urlencode(args) return parse.urlunparse( (parts.scheme, parts.netloc, path,", "headers=0, gid=0, sheet=None): parts = parse.urlparse(url) if parts.path.endswith('/edit'): path =", "in qs: gid = qs['gid'][-1] if 'sheet' in qs: sheet", "sheet=None): parts = parse.urlparse(url) if parts.path.endswith('/edit'): path = parts.path[:-len('/edit')] else:", "__future__ import print_function from __future__ import unicode_literals from collections import", "__future__ import division from __future__ import print_function from __future__ import", 
"import OrderedDict from moz_sql_parser import parse as parse_sql import pyparsing", "parse_sql import pyparsing import re from six.moves.urllib import parse FROM_REGEX", "return parse.urlunparse( (parts.scheme, parts.netloc, path, None, params, None)) def extract_url(sql):", "parts.fragment[len('gid='):] args = OrderedDict() if headers > 0: args['headers'] =", "OrderedDict() if headers > 0: args['headers'] = headers if sheet", "to extract from match = FROM_REGEX.search(sql) if match: return match.group(1).strip('\"')", "import print_function from __future__ import unicode_literals from collections import OrderedDict", "as parse_sql import pyparsing import re from six.moves.urllib import parse", "'sheet' in qs: sheet = qs['sheet'][-1] if parts.fragment.startswith('gid='): gid =", "None: args['sheet'] = sheet else: args['gid'] = gid params =", "if 'sheet' in qs: sheet = qs['sheet'][-1] if parts.fragment.startswith('gid='): gid", "> 0: args['headers'] = headers if sheet is not None:", "from collections import OrderedDict from moz_sql_parser import parse as parse_sql", "= headers if sheet is not None: args['sheet'] = sheet", "parse.urlencode(args) return parse.urlunparse( (parts.scheme, parts.netloc, path, None, params, None)) def", "(\"http.*?\")', re.IGNORECASE) def get_url(url, headers=0, gid=0, sheet=None): parts = parse.urlparse(url)", "try: return parse_sql(sql)['from'] except pyparsing.ParseException: # fallback to regex to", "pyparsing import re from six.moves.urllib import parse FROM_REGEX = re.compile('", "from __future__ import print_function from __future__ import unicode_literals from collections", "FROM_REGEX = re.compile(' from (\"http.*?\")', re.IGNORECASE) def get_url(url, headers=0, gid=0,", "headers if sheet is not None: args['sheet'] = sheet else:", "from __future__ import absolute_import from __future__ import division from __future__", "None)) def extract_url(sql): try: return parse_sql(sql)['from'] except pyparsing.ParseException: # fallback", 
"parse as parse_sql import pyparsing import re from six.moves.urllib import", "sheet is not None: args['sheet'] = sheet else: args['gid'] =", "else: args['gid'] = gid params = parse.urlencode(args) return parse.urlunparse( (parts.scheme,", "if headers > 0: args['headers'] = headers if sheet is", "qs['gid'][-1] if 'sheet' in qs: sheet = qs['sheet'][-1] if parts.fragment.startswith('gid='):", "from __future__ import unicode_literals from collections import OrderedDict from moz_sql_parser", "from (\"http.*?\")', re.IGNORECASE) def get_url(url, headers=0, gid=0, sheet=None): parts =", "import unicode_literals from collections import OrderedDict from moz_sql_parser import parse", "return parse_sql(sql)['from'] except pyparsing.ParseException: # fallback to regex to extract", "except pyparsing.ParseException: # fallback to regex to extract from match", "headers > 0: args['headers'] = headers if sheet is not", "def get_url(url, headers=0, gid=0, sheet=None): parts = parse.urlparse(url) if parts.path.endswith('/edit'):", "is not None: args['sheet'] = sheet else: args['gid'] = gid", "params, None)) def extract_url(sql): try: return parse_sql(sql)['from'] except pyparsing.ParseException: #", "= OrderedDict() if headers > 0: args['headers'] = headers if", "args['sheet'] = sheet else: args['gid'] = gid params = parse.urlencode(args)", "= sheet else: args['gid'] = gid params = parse.urlencode(args) return", "re.compile(' from (\"http.*?\")', re.IGNORECASE) def get_url(url, headers=0, gid=0, sheet=None): parts", "import pyparsing import re from six.moves.urllib import parse FROM_REGEX =", "parts.path path = '/'.join((path.rstrip('/'), 'gviz/tq')) qs = parse.parse_qs(parts.query) if 'headers'", "unicode_literals from collections import OrderedDict from moz_sql_parser import parse as", "= parse.urlparse(url) if parts.path.endswith('/edit'): path = parts.path[:-len('/edit')] else: path =", "sheet = qs['sheet'][-1] if parts.fragment.startswith('gid='): gid = 
parts.fragment[len('gid='):] args =", "= int(qs['headers'][-1]) if 'gid' in qs: gid = qs['gid'][-1] if", "import division from __future__ import print_function from __future__ import unicode_literals", "if sheet is not None: args['sheet'] = sheet else: args['gid']", "__future__ import absolute_import from __future__ import division from __future__ import", "gid = parts.fragment[len('gid='):] args = OrderedDict() if headers > 0:", "collections import OrderedDict from moz_sql_parser import parse as parse_sql import", "gid = qs['gid'][-1] if 'sheet' in qs: sheet = qs['sheet'][-1]", "division from __future__ import print_function from __future__ import unicode_literals from", "not None: args['sheet'] = sheet else: args['gid'] = gid params", "path = parts.path[:-len('/edit')] else: path = parts.path path = '/'.join((path.rstrip('/'),", "parts.netloc, path, None, params, None)) def extract_url(sql): try: return parse_sql(sql)['from']", "parse_sql(sql)['from'] except pyparsing.ParseException: # fallback to regex to extract from", "parts.fragment.startswith('gid='): gid = parts.fragment[len('gid='):] args = OrderedDict() if headers >", "params = parse.urlencode(args) return parse.urlunparse( (parts.scheme, parts.netloc, path, None, params,", "parse.parse_qs(parts.query) if 'headers' in qs: headers = int(qs['headers'][-1]) if 'gid'", "regex to extract from match = FROM_REGEX.search(sql) if match: return", "parse.urlunparse( (parts.scheme, parts.netloc, path, None, params, None)) def extract_url(sql): try:", "int(qs['headers'][-1]) if 'gid' in qs: gid = qs['gid'][-1] if 'sheet'", "= re.compile(' from (\"http.*?\")', re.IGNORECASE) def get_url(url, headers=0, gid=0, sheet=None):", "= parse.parse_qs(parts.query) if 'headers' in qs: headers = int(qs['headers'][-1]) if", "= parts.path path = '/'.join((path.rstrip('/'), 'gviz/tq')) qs = parse.parse_qs(parts.query) if", "re from six.moves.urllib import parse FROM_REGEX = re.compile(' from (\"http.*?\")',", "from six.moves.urllib 
import parse FROM_REGEX = re.compile(' from (\"http.*?\")', re.IGNORECASE)", "print_function from __future__ import unicode_literals from collections import OrderedDict from", "pyparsing.ParseException: # fallback to regex to extract from match =", "def extract_url(sql): try: return parse_sql(sql)['from'] except pyparsing.ParseException: # fallback to", "if parts.path.endswith('/edit'): path = parts.path[:-len('/edit')] else: path = parts.path path", "if 'gid' in qs: gid = qs['gid'][-1] if 'sheet' in", "= parts.fragment[len('gid='):] args = OrderedDict() if headers > 0: args['headers']", "'gviz/tq')) qs = parse.parse_qs(parts.query) if 'headers' in qs: headers =", "None, params, None)) def extract_url(sql): try: return parse_sql(sql)['from'] except pyparsing.ParseException:", "OrderedDict from moz_sql_parser import parse as parse_sql import pyparsing import", "'/'.join((path.rstrip('/'), 'gviz/tq')) qs = parse.parse_qs(parts.query) if 'headers' in qs: headers", "fallback to regex to extract from match = FROM_REGEX.search(sql) if", "import parse FROM_REGEX = re.compile(' from (\"http.*?\")', re.IGNORECASE) def get_url(url,", "from moz_sql_parser import parse as parse_sql import pyparsing import re", "if 'headers' in qs: headers = int(qs['headers'][-1]) if 'gid' in", "= parse.urlencode(args) return parse.urlunparse( (parts.scheme, parts.netloc, path, None, params, None))", "else: path = parts.path path = '/'.join((path.rstrip('/'), 'gviz/tq')) qs =", "moz_sql_parser import parse as parse_sql import pyparsing import re from", "parts.path.endswith('/edit'): path = parts.path[:-len('/edit')] else: path = parts.path path =", "path, None, params, None)) def extract_url(sql): try: return parse_sql(sql)['from'] except", "import absolute_import from __future__ import division from __future__ import print_function", "import parse as parse_sql import pyparsing import re from six.moves.urllib", "__future__ import unicode_literals from collections import OrderedDict from 
moz_sql_parser import", "parse FROM_REGEX = re.compile(' from (\"http.*?\")', re.IGNORECASE) def get_url(url, headers=0,", "to regex to extract from match = FROM_REGEX.search(sql) if match:" ]
[ "len(boxes) == 0 and len(labels) == 0: boxes.append([0, 0, 2,", "self.coco[\"images\"][idx] ) img_path = Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path =", "= Image.open(img_path).convert(\"RGB\") w, h = img.size if \"segments_info\" in ann_info:", "# Instead of finding boxes, just take the one from", "self.ann_file = ann_file self.transforms = transforms self.return_masks = return_masks def", "box_xywh_to_xyxy(x): xs, ys, w, h = x.unbind(-1) b = [xs,", "= img_info['width'] return height, width def build(image_set, args): root =", "/ ann_file dataset = ConstructionPanoptic( img_folder_path, ann_folder_path, ann_file, transforms=make_construction_transforms(image_set), return_masks=args.masks,", "build(image_set, args): root = Path(args.data_path) assert ( root.exists() ), f\"provided", "= masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels # Instead of finding boxes,", "4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0, 0, 2, 2]) area.append(4) labels.append(ann[\"category_id\"])", "w, h = img.size if \"segments_info\" in ann_info: masks =", "target[\"boxes\"] = boxes target['size'] = torch.as_tensor([int(h), int(w)]) target['orig_size'] = torch.as_tensor([int(h),", "area = torch.tensor(area) boxes = box_xywh_to_xyxy(boxes) return boxes, labels, iscrowd,", "# for name in ['iscrowd', 'area']: # target[name] = torch.tensor([ann[name]", "make_construction_transforms import logging def box_xywh_to_xyxy(x): xs, ys, w, h =", "= { \"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"),", "[] for ann in segments: if len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"])", "{} target['image_id'] = torch.tensor([ann_info['image_id'] if \"image_id\" in ann_info else ann_info[\"id\"]])", "iscrowd = torch.tensor(iscrowd) area = torch.tensor(area) boxes = box_xywh_to_xyxy(boxes) return", "iscrowd target['area'] = area # if 
\"segments_info\" in ann_info: #", "2, 2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes) == 0 and", "path {root} does not exist\" mode = \"panoptic\" PATHS =", "ann_folder, ann_file, transforms=None, return_masks=True): with open(ann_file, \"r\") as f: self.coco", "logging.error(ann_info) raise e def __len__(self): return len(self.coco['images']) def get_height_and_width(self, idx):", "ys, w, h = x.unbind(-1) b = [xs, ys, (xs", ") target = {} target['image_id'] = torch.tensor([ann_info['image_id'] if \"image_id\" in", "# dtype=torch.int64, # ) target = {} target['image_id'] = torch.tensor([ann_info['image_id']", "\"segments_info\" in ann_info: # for name in ['iscrowd', 'area']: #", "= \"panoptic\" PATHS = { \"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\":", "0: boxes.append([0, 0, 2, 2]) labels.append(1) area.append(4) iscrowd.append(0) boxes =", "= torch.tensor(area) boxes = box_xywh_to_xyxy(boxes) return boxes, labels, iscrowd, area", "iscrowd.append(ann['iscrowd']) if len(boxes) == 0 and len(labels) == 0: boxes.append([0,", "self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda x: x[\"id\"]) # sanity check if", "target[name] = torch.tensor([ann[name] for ann in ann_info['segments_info']]) if self.transforms is", "if \"segments_info\" in ann_info: # for name in ['iscrowd', 'area']:", "zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder = img_folder self.ann_folder", "= img_folder self.ann_folder = ann_folder self.ann_file = ann_file self.transforms =", "return_masks=True): with open(ann_file, \"r\") as f: self.coco = json.load(f) #", "[ann[\"category_id\"] for ann in ann_info[\"segments_info\"]], # dtype=torch.int64, # ) target", "for name in ['iscrowd', 'area']: # target[name] = torch.tensor([ann[name] for", "+ w), (ys + h)] return torch.stack(b, dim=-1) def masks_to_boxes(segments):", "img_info = 
self.coco['images'][idx] height = img_info['height'] width = img_info['width'] return", "return len(self.coco['images']) def get_height_and_width(self, idx): img_info = self.coco['images'][idx] height =", "img_folder_path = root / img_folder ann_folder_path = root / ann_folder", "def build(image_set, args): root = Path(args.data_path) assert ( root.exists() ),", "= Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path = Path(self.ann_folder) / ann_info[\"file_name\"]", "finding boxes, just take the one from json info available", "= Path(self.ann_folder) / ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\") w, h =", "idx): img_info = self.coco['images'][idx] height = img_info['height'] width = img_info['width']", "= torch.tensor(iscrowd) area = torch.tensor(area) boxes = box_xywh_to_xyxy(boxes) return boxes,", "as np import torch from PIL import Image from panopticapi.utils", "ann_info['segments_info']]) if self.transforms is not None: img, target = self.transforms(img,", "len(self.coco['images']) def get_height_and_width(self, idx): img_info = self.coco['images'][idx] height = img_info['height']", "iscrowd.append(0) boxes = torch.tensor(boxes, dtype=torch.int64) labels = torch.tensor(labels, dtype=torch.int64) iscrowd", "boxes.append([0, 0, 2, 2]) labels.append(1) area.append(4) iscrowd.append(0) boxes = torch.tensor(boxes,", "img, target except Exception as e: logging.error(ann_info) raise e def", "'images' field so that they are aligned with 'annotations' #", "from PIL import Image from panopticapi.utils import rgb2id # from", "[] iscrowd = [] area = [] for ann in", "+ h)] return torch.stack(b, dim=-1) def masks_to_boxes(segments): boxes = []", "in ann_info: # for name in ['iscrowd', 'area']: # target[name]", "target except Exception as e: logging.error(ann_info) raise e def __len__(self):", "img.size if \"segments_info\" in ann_info: masks = np.asarray(Image.open(ann_path), dtype=np.uint32) masks", "= 
torch.as_tensor(masks, dtype=torch.uint8) # labels = torch.tensor( # [ann[\"category_id\"] for", "ann_info: masks = np.asarray(Image.open(ann_path), dtype=np.uint32) masks = rgb2id(masks) ids =", "if len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0, 0, 2,", "does not exist\" mode = \"panoptic\" PATHS = { \"train\":", "# labels = torch.tensor( # [ann[\"category_id\"] for ann in ann_info[\"segments_info\"]],", "= [xs, ys, (xs + w), (ys + h)] return", "[] labels = [] iscrowd = [] area = []", "if self.return_masks: target['masks'] = masks boxes, labels, iscrowd, area =", "= sorted(self.coco[\"images\"], key=lambda x: x[\"id\"]) # sanity check if \"annotations\"", "0, 2, 2]) labels.append(1) area.append(4) iscrowd.append(0) boxes = torch.tensor(boxes, dtype=torch.int64)", "# i.e., in alphabetical order self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda x:", "in ['iscrowd', 'area']: # target[name] = torch.tensor([ann[name] for ann in", "in segments: if len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0,", "transforms=None, return_masks=True): with open(ann_file, \"r\") as f: self.coco = json.load(f)", "for img, ann in zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4]", "ann_folder ann_file = root / ann_file dataset = ConstructionPanoptic( img_folder_path,", ") img_path = Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path = Path(self.ann_folder)", "one from json info available # target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"]", "in ann_info: masks = np.asarray(Image.open(ann_path), dtype=np.uint32) masks = rgb2id(masks) ids", "x.unbind(-1) b = [xs, ys, (xs + w), (ys +", "= root / ann_file dataset = ConstructionPanoptic( img_folder_path, ann_folder_path, ann_file,", "len(labels) == 0: boxes.append([0, 0, 2, 2]) labels.append(1) 
area.append(4) iscrowd.append(0)", "ann_file self.transforms = transforms self.return_masks = return_masks def __getitem__(self, idx):", "in ann_info['segments_info']]) if self.transforms is not None: img, target =", "PATHS = { \"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\",", "ann_folder_path = root / ann_folder ann_file = root / ann_file", "return_masks def __getitem__(self, idx): try: ann_info = ( self.coco[\"annotations\"][idx] if", "img_folder self.ann_folder = ann_folder self.ann_file = ann_file self.transforms = transforms", "ann in segments: if len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else:", ".construction import make_construction_transforms import logging def box_xywh_to_xyxy(x): xs, ys, w,", "masks boxes, labels, iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels", "( self.coco[\"annotations\"][idx] if \"annotations\" in self.coco else self.coco[\"images\"][idx] ) img_path", "area.append(ann['area']) else: boxes.append([0, 0, 2, 2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if", "ann_path = Path(self.ann_folder) / ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\") w, h", "self.coco: for img, ann in zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] ==", "boxes = torch.tensor(boxes, dtype=torch.int64) labels = torch.tensor(labels, dtype=torch.int64) iscrowd =", "for ann in ann_info[\"segments_info\"]], # dtype=torch.int64, # ) target =", "box_xywh_to_xyxy(boxes) return boxes, labels, iscrowd, area class ConstructionPanoptic: def __init__(self,", "return boxes, labels, iscrowd, area class ConstructionPanoptic: def __init__(self, img_folder,", "img_folder, ann_folder, ann_file = PATHS[image_set] img_folder_path = root / img_folder", "root / ann_file dataset = ConstructionPanoptic( img_folder_path, ann_folder_path, ann_file, 
transforms=make_construction_transforms(image_set),", "masks = rgb2id(masks) ids = np.array([ann[\"id\"] for ann in ann_info[\"segments_info\"]])", "= img_info['height'] width = img_info['width'] return height, width def build(image_set,", "Panoptic path {root} does not exist\" mode = \"panoptic\" PATHS", "def box_xywh_to_xyxy(x): xs, ys, w, h = x.unbind(-1) b =", "masks = torch.as_tensor(masks, dtype=torch.uint8) # labels = torch.tensor( # [ann[\"category_id\"]", "img, target = self.transforms(img, target) return img, target except Exception", "iscrowd = [] area = [] for ann in segments:", "in ann_info else ann_info[\"id\"]]) if self.return_masks: target['masks'] = masks boxes,", "exist\" mode = \"panoptic\" PATHS = { \"train\": (\"images\", f\"{mode}\",", "= ConstructionPanoptic( img_folder_path, ann_folder_path, ann_file, transforms=make_construction_transforms(image_set), return_masks=args.masks, ) return dataset", "is not None: img, target = self.transforms(img, target) return img,", "Exception as e: logging.error(ann_info) raise e def __len__(self): return len(self.coco['images'])", "All Rights Reserved import json from pathlib import Path import", "get_height_and_width(self, idx): img_info = self.coco['images'][idx] height = img_info['height'] width =", "= torch.as_tensor([int(h), int(w)]) target['orig_size'] = torch.as_tensor([int(h), int(w)]) target['iscrowd'] = iscrowd", "self.coco else self.coco[\"images\"][idx] ) img_path = Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\")", "None: img, target = self.transforms(img, target) return img, target except", "e def __len__(self): return len(self.coco['images']) def get_height_and_width(self, idx): img_info =", "(xs + w), (ys + h)] return torch.stack(b, dim=-1) def", "return img, target except Exception as e: logging.error(ann_info) raise e", "width def build(image_set, args): root = Path(args.data_path) assert ( root.exists()", "ann in ann_info[\"segments_info\"]]) masks = masks == 
ids[:, None, None]", "2]) labels.append(1) area.append(4) iscrowd.append(0) boxes = torch.tensor(boxes, dtype=torch.int64) labels =", "torch.tensor(labels, dtype=torch.int64) iscrowd = torch.tensor(iscrowd) area = torch.tensor(area) boxes =", "# sanity check if \"annotations\" in self.coco: for img, ann", "from util.box_ops import masks_to_boxes from .construction import make_construction_transforms import logging", "iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels # Instead of", "img_path = Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path = Path(self.ann_folder) /", "in alphabetical order self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda x: x[\"id\"]) #", "name in ['iscrowd', 'area']: # target[name] = torch.tensor([ann[name] for ann", "take the one from json info available # target[\"boxes\"] =", "available # target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes target['size'] =", "= labels # Instead of finding boxes, just take the", "dtype=torch.uint8) # labels = torch.tensor( # [ann[\"category_id\"] for ann in", "= rgb2id(masks) ids = np.array([ann[\"id\"] for ann in ann_info[\"segments_info\"]]) masks", "height = img_info['height'] width = img_info['width'] return height, width def", "Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved", "area # if \"segments_info\" in ann_info: # for name in", "return torch.stack(b, dim=-1) def masks_to_boxes(segments): boxes = [] labels =", "torch.tensor([ann_info['image_id'] if \"image_id\" in ann_info else ann_info[\"id\"]]) if self.return_masks: target['masks']", "torch.as_tensor([int(h), int(w)]) target['iscrowd'] = iscrowd target['area'] = area # if", "import json from pathlib import Path import numpy as np", "idx): try: ann_info = ( self.coco[\"annotations\"][idx] if \"annotations\" in self.coco", "x[\"id\"]) # sanity check if \"annotations\" in self.coco: for img,", "import Path import numpy as np import torch from PIL", "img = Image.open(img_path).convert(\"RGB\") w, h = img.size if \"segments_info\" in", "0 and len(labels) == 0: boxes.append([0, 0, 2, 2]) labels.append(1)", "labels.append(1) area.append(4) iscrowd.append(0) boxes = torch.tensor(boxes, dtype=torch.int64) labels = torch.tensor(labels,", "ann_info[\"id\"]]) if self.return_masks: target['masks'] = masks boxes, labels, iscrowd, area", "w), (ys + h)] return torch.stack(b, dim=-1) def masks_to_boxes(segments): boxes", "= root / img_folder ann_folder_path = root / ann_folder ann_file", "if len(boxes) == 0 and len(labels) == 0: boxes.append([0, 0,", "target['labels'] = labels # Instead of finding boxes, just take", "import torch from PIL import Image from panopticapi.utils import rgb2id", "ann_file, transforms=None, return_masks=True): with open(ann_file, \"r\") as f: self.coco =", "from json info available # target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] =", "from .construction import make_construction_transforms import logging def box_xywh_to_xyxy(x): xs, ys,", "masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes target['size'] = torch.as_tensor([int(h), int(w)]) target['orig_size'] =", "== 0: boxes.append([0, 0, 2, 2]) labels.append(1) area.append(4) iscrowd.append(0) boxes", "ann in zip(self.coco[\"images\"], 
self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder =", "masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels # Instead of finding boxes, just", "assert ( root.exists() ), f\"provided Panoptic path {root} does not", "self.transforms is not None: img, target = self.transforms(img, target) return", "in self.coco: for img, ann in zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert img[\"file_name\"][:-4]", "masks = np.asarray(Image.open(ann_path), dtype=np.uint32) masks = rgb2id(masks) ids = np.array([ann[\"id\"]", "x: x[\"id\"]) # sanity check if \"annotations\" in self.coco: for", "with open(ann_file, \"r\") as f: self.coco = json.load(f) # sort", "ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\") w, h = img.size if \"segments_info\"", "so that they are aligned with 'annotations' # i.e., in", "boxes.append([0, 0, 2, 2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes) ==", "root = Path(args.data_path) assert ( root.exists() ), f\"provided Panoptic path", "import masks_to_boxes from .construction import make_construction_transforms import logging def box_xywh_to_xyxy(x):", "= torch.tensor([ann_info['image_id'] if \"image_id\" in ann_info else ann_info[\"id\"]]) if self.return_masks:", "np.array([ann[\"id\"] for ann in ann_info[\"segments_info\"]]) masks = masks == ids[:,", "import numpy as np import torch from PIL import Image", "dtype=torch.int64) iscrowd = torch.tensor(iscrowd) area = torch.tensor(area) boxes = box_xywh_to_xyxy(boxes)", "\"image_id\" in ann_info else ann_info[\"id\"]]) if self.return_masks: target['masks'] = masks", "info available # target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes target['size']", "self.transforms(img, target) return img, target except Exception as e: logging.error(ann_info)", "ids = np.array([ann[\"id\"] for ann in ann_info[\"segments_info\"]]) 
masks = masks", "the one from json info available # target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"])", "# from util.box_ops import masks_to_boxes from .construction import make_construction_transforms import", "2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes) == 0 and len(labels)", "'area']: # target[name] = torch.tensor([ann[name] for ann in ann_info['segments_info']]) if", "key=lambda x: x[\"id\"]) # sanity check if \"annotations\" in self.coco:", "\"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder, ann_folder, ann_file = PATHS[image_set]", "np import torch from PIL import Image from panopticapi.utils import", "= x.unbind(-1) b = [xs, ys, (xs + w), (ys", "check if \"annotations\" in self.coco: for img, ann in zip(self.coco[\"images\"],", "json info available # target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes", "labels = torch.tensor( # [ann[\"category_id\"] for ann in ann_info[\"segments_info\"]], #", "that they are aligned with 'annotations' # i.e., in alphabetical", "self.coco['images'][idx] height = img_info['height'] width = img_info['width'] return height, width", "dataset = ConstructionPanoptic( img_folder_path, ann_folder_path, ann_file, transforms=make_construction_transforms(image_set), return_masks=args.masks, ) return", "for ann in segments: if len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"]) area.append(ann['area'])", "field so that they are aligned with 'annotations' # i.e.,", "as e: logging.error(ann_info) raise e def __len__(self): return len(self.coco['images']) def", "util.box_ops import masks_to_boxes from .construction import make_construction_transforms import logging def", "= box_xywh_to_xyxy(boxes) return boxes, labels, iscrowd, area class ConstructionPanoptic: def", "dtype=torch.int64, # ) target = {} target['image_id'] = torch.tensor([ann_info['image_id'] if", "def get_height_and_width(self, idx): img_info 
= self.coco['images'][idx] height = img_info['height'] width", "# Copyright (c) Facebook, Inc. and its affiliates. All Rights", "labels, iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels # Instead", "Instead of finding boxes, just take the one from json", "= boxes target['size'] = torch.as_tensor([int(h), int(w)]) target['orig_size'] = torch.as_tensor([int(h), int(w)])", "panopticapi.utils import rgb2id # from util.box_ops import masks_to_boxes from .construction", "xs, ys, w, h = x.unbind(-1) b = [xs, ys,", "in zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder = img_folder", "if \"image_id\" in ann_info else ann_info[\"id\"]]) if self.return_masks: target['masks'] =", "= torch.tensor(boxes, dtype=torch.int64) labels = torch.tensor(labels, dtype=torch.int64) iscrowd = torch.tensor(iscrowd)", "masks == ids[:, None, None] masks = torch.as_tensor(masks, dtype=torch.uint8) #", "self.coco = json.load(f) # sort 'images' field so that they", "target) return img, target except Exception as e: logging.error(ann_info) raise", "ann_file = PATHS[image_set] img_folder_path = root / img_folder ann_folder_path =", "\"panoptic\" PATHS = { \"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\",", "ann in ann_info['segments_info']]) if self.transforms is not None: img, target", "f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder, ann_folder, ann_file", "sanity check if \"annotations\" in self.coco: for img, ann in", "def masks_to_boxes(segments): boxes = [] labels = [] iscrowd =", "ann_folder self.ann_file = ann_file self.transforms = transforms self.return_masks = return_masks", "= self.transforms(img, target) return img, target except Exception as e:", "ann_info[\"segments_info\"]]) masks = masks == ids[:, None, None] masks =", "labels # Instead of finding boxes, just take the one", 
"target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes target['size'] = torch.as_tensor([int(h), int(w)])", "boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0, 0, 2, 2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd'])", "target = {} target['image_id'] = torch.tensor([ann_info['image_id'] if \"image_id\" in ann_info", "else: boxes.append([0, 0, 2, 2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes)", "labels = [] iscrowd = [] area = [] for", "boxes, labels, iscrowd, area class ConstructionPanoptic: def __init__(self, img_folder, ann_folder,", "boxes, just take the one from json info available #", "= return_masks def __getitem__(self, idx): try: ann_info = ( self.coco[\"annotations\"][idx]", "else self.coco[\"images\"][idx] ) img_path = Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path", "ann_info else ann_info[\"id\"]]) if self.return_masks: target['masks'] = masks boxes, labels,", "= ann_folder self.ann_file = ann_file self.transforms = transforms self.return_masks =", "# ) target = {} target['image_id'] = torch.tensor([ann_info['image_id'] if \"image_id\"", "area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels # Instead of finding", "json.load(f) # sort 'images' field so that they are aligned", "class ConstructionPanoptic: def __init__(self, img_folder, ann_folder, ann_file, transforms=None, return_masks=True): with", "boxes, labels, iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] = labels #", "0, 2, 2]) area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes) == 0", "img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder = img_folder self.ann_folder = ann_folder self.ann_file", "args): root = Path(args.data_path) assert ( root.exists() ), f\"provided Panoptic", "from pathlib import Path 
import numpy as np import torch", "they are aligned with 'annotations' # i.e., in alphabetical order", "boxes = [] labels = [] iscrowd = [] area", "torch.stack(b, dim=-1) def masks_to_boxes(segments): boxes = [] labels = []", "# sort 'images' field so that they are aligned with", "sorted(self.coco[\"images\"], key=lambda x: x[\"id\"]) # sanity check if \"annotations\" in", "order self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda x: x[\"id\"]) # sanity check", "f\"val_{mode}.json\"), } img_folder, ann_folder, ann_file = PATHS[image_set] img_folder_path = root", "json from pathlib import Path import numpy as np import", "import logging def box_xywh_to_xyxy(x): xs, ys, w, h = x.unbind(-1)", "raise e def __len__(self): return len(self.coco['images']) def get_height_and_width(self, idx): img_info", "if \"segments_info\" in ann_info: masks = np.asarray(Image.open(ann_path), dtype=np.uint32) masks =", "img_folder ann_folder_path = root / ann_folder ann_file = root /", "= ( self.coco[\"annotations\"][idx] if \"annotations\" in self.coco else self.coco[\"images\"][idx] )", "numpy as np import torch from PIL import Image from", "as f: self.coco = json.load(f) # sort 'images' field so", "{root} does not exist\" mode = \"panoptic\" PATHS = {", "None, None] masks = torch.as_tensor(masks, dtype=torch.uint8) # labels = torch.tensor(", "= {} target['image_id'] = torch.tensor([ann_info['image_id'] if \"image_id\" in ann_info else", "/ ann_folder ann_file = root / ann_file dataset = ConstructionPanoptic(", "# target[name] = torch.tensor([ann[name] for ann in ann_info['segments_info']]) if self.transforms", "target['orig_size'] = torch.as_tensor([int(h), int(w)]) target['iscrowd'] = iscrowd target['area'] = area", "root / img_folder ann_folder_path = root / ann_folder ann_file =", "root.exists() ), f\"provided Panoptic path {root} does not exist\" mode", "import rgb2id # from util.box_ops import masks_to_boxes from .construction import", "int(w)]) target['orig_size'] = 
torch.as_tensor([int(h), int(w)]) target['iscrowd'] = iscrowd target['area'] =", "of finding boxes, just take the one from json info", "PIL import Image from panopticapi.utils import rgb2id # from util.box_ops", "len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0, 0, 2, 2])", "self.coco[\"annotations\"][idx] if \"annotations\" in self.coco else self.coco[\"images\"][idx] ) img_path =", "# if \"segments_info\" in ann_info: # for name in ['iscrowd',", "= json.load(f) # sort 'images' field so that they are", "labels = torch.tensor(labels, dtype=torch.int64) iscrowd = torch.tensor(iscrowd) area = torch.tensor(area)", "affiliates. All Rights Reserved import json from pathlib import Path", "h)] return torch.stack(b, dim=-1) def masks_to_boxes(segments): boxes = [] labels", "\"annotations\" in self.coco: for img, ann in zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert", "Image from panopticapi.utils import rgb2id # from util.box_ops import masks_to_boxes", "= masks boxes, labels, iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels'] =", "int(w)]) target['iscrowd'] = iscrowd target['area'] = area # if \"segments_info\"", "def __len__(self): return len(self.coco['images']) def get_height_and_width(self, idx): img_info = self.coco['images'][idx]", "= np.array([ann[\"id\"] for ann in ann_info[\"segments_info\"]]) masks = masks ==", "if self.transforms is not None: img, target = self.transforms(img, target)", "ann_file dataset = ConstructionPanoptic( img_folder_path, ann_folder_path, ann_file, transforms=make_construction_transforms(image_set), return_masks=args.masks, )", "open(ann_file, \"r\") as f: self.coco = json.load(f) # sort 'images'", "ann in ann_info[\"segments_info\"]], # dtype=torch.int64, # ) target = {}", "masks = masks == ids[:, None, None] masks = torch.as_tensor(masks,", "= torch.tensor([ann[name] for ann in ann_info['segments_info']]) if self.transforms is not", 
"np.asarray(Image.open(ann_path), dtype=np.uint32) masks = rgb2id(masks) ids = np.array([ann[\"id\"] for ann", "torch.as_tensor(masks, dtype=torch.uint8) # labels = torch.tensor( # [ann[\"category_id\"] for ann", "None] masks = torch.as_tensor(masks, dtype=torch.uint8) # labels = torch.tensor( #", "= [] for ann in segments: if len(ann[\"bbox\"]) == 4:", "ann[\"file_name\"][:-4] self.img_folder = img_folder self.ann_folder = ann_folder self.ann_file = ann_file", "\".jpg\") ann_path = Path(self.ann_folder) / ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\") w,", "torch.tensor(area) boxes = box_xywh_to_xyxy(boxes) return boxes, labels, iscrowd, area class", "[] area = [] for ann in segments: if len(ann[\"bbox\"])", "else ann_info[\"id\"]]) if self.return_masks: target['masks'] = masks boxes, labels, iscrowd,", "sort 'images' field so that they are aligned with 'annotations'", "self.img_folder = img_folder self.ann_folder = ann_folder self.ann_file = ann_file self.transforms", "= Path(args.data_path) assert ( root.exists() ), f\"provided Panoptic path {root}", "\"segments_info\" in ann_info: masks = np.asarray(Image.open(ann_path), dtype=np.uint32) masks = rgb2id(masks)", "= [] iscrowd = [] area = [] for ann", "PATHS[image_set] img_folder_path = root / img_folder ann_folder_path = root /", "# [ann[\"category_id\"] for ann in ann_info[\"segments_info\"]], # dtype=torch.int64, # )", "torch from PIL import Image from panopticapi.utils import rgb2id #", "dim=-1) def masks_to_boxes(segments): boxes = [] labels = [] iscrowd", "2, 2]) labels.append(1) area.append(4) iscrowd.append(0) boxes = torch.tensor(boxes, dtype=torch.int64) labels", "'annotations' # i.e., in alphabetical order self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda", "= img.size if \"segments_info\" in ann_info: masks = np.asarray(Image.open(ann_path), dtype=np.uint32)", "== 4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0, 0, 2, 2]) 
area.append(4)", "\"annotations\" in self.coco else self.coco[\"images\"][idx] ) img_path = Path(self.img_folder) /", "boxes = box_xywh_to_xyxy(boxes) return boxes, labels, iscrowd, area class ConstructionPanoptic:", "in self.coco else self.coco[\"images\"][idx] ) img_path = Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\",", "Facebook, Inc. and its affiliates. All Rights Reserved import json", "assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder = img_folder self.ann_folder = ann_folder", "if \"annotations\" in self.coco: for img, ann in zip(self.coco[\"images\"], self.coco[\"annotations\"]):", "== 0 and len(labels) == 0: boxes.append([0, 0, 2, 2])", "= area # if \"segments_info\" in ann_info: # for name", "__len__(self): return len(self.coco['images']) def get_height_and_width(self, idx): img_info = self.coco['images'][idx] height", "e: logging.error(ann_info) raise e def __len__(self): return len(self.coco['images']) def get_height_and_width(self,", "masks_to_boxes(segments): boxes = [] labels = [] iscrowd = []", "Path(self.ann_folder) / ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\") w, h = img.size", "alphabetical order self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda x: x[\"id\"]) # sanity", "img, ann in zip(self.coco[\"images\"], self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder", "area = [] for ann in segments: if len(ann[\"bbox\"]) ==", "torch.tensor(iscrowd) area = torch.tensor(area) boxes = box_xywh_to_xyxy(boxes) return boxes, labels,", "dtype=torch.int64) labels = torch.tensor(labels, dtype=torch.int64) iscrowd = torch.tensor(iscrowd) area =", "= self.coco['images'][idx] height = img_info['height'] width = img_info['width'] return height,", "w, h = x.unbind(-1) b = [xs, ys, (xs +", "self.ann_folder = ann_folder self.ann_file = ann_file self.transforms = transforms self.return_masks", "= [] area = [] for ann in segments: if", 
"from panopticapi.utils import rgb2id # from util.box_ops import masks_to_boxes from", "logging def box_xywh_to_xyxy(x): xs, ys, w, h = x.unbind(-1) b", "masks_to_boxes from .construction import make_construction_transforms import logging def box_xywh_to_xyxy(x): xs,", "and its affiliates. All Rights Reserved import json from pathlib", "def __getitem__(self, idx): try: ann_info = ( self.coco[\"annotations\"][idx] if \"annotations\"", "__init__(self, img_folder, ann_folder, ann_file, transforms=None, return_masks=True): with open(ann_file, \"r\") as", "(\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder, ann_folder, ann_file = PATHS[image_set] img_folder_path", "{ \"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), }", "with 'annotations' # i.e., in alphabetical order self.coco[\"images\"] = sorted(self.coco[\"images\"],", "Image.open(img_path).convert(\"RGB\") w, h = img.size if \"segments_info\" in ann_info: masks", "labels, iscrowd, area class ConstructionPanoptic: def __init__(self, img_folder, ann_folder, ann_file,", "= iscrowd target['area'] = area # if \"segments_info\" in ann_info:", "), f\"provided Panoptic path {root} does not exist\" mode =", "i.e., in alphabetical order self.coco[\"images\"] = sorted(self.coco[\"images\"], key=lambda x: x[\"id\"])", "= transforms self.return_masks = return_masks def __getitem__(self, idx): try: ann_info", "Path(args.data_path) assert ( root.exists() ), f\"provided Panoptic path {root} does", "self.return_masks: target['masks'] = masks boxes, labels, iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"])", "ConstructionPanoptic: def __init__(self, img_folder, ann_folder, ann_file, transforms=None, return_masks=True): with open(ann_file,", "(ys + h)] return torch.stack(b, dim=-1) def masks_to_boxes(segments): boxes =", "= torch.as_tensor([int(h), int(w)]) target['iscrowd'] = iscrowd target['area'] = area #", "= masks == ids[:, None, None] 
masks = torch.as_tensor(masks, dtype=torch.uint8)", "f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder, ann_folder, ann_file =", "[xs, ys, (xs + w), (ys + h)] return torch.stack(b,", "== ids[:, None, None] masks = torch.as_tensor(masks, dtype=torch.uint8) # labels", "f: self.coco = json.load(f) # sort 'images' field so that", "(c) Facebook, Inc. and its affiliates. All Rights Reserved import", "torch.tensor([ann[name] for ann in ann_info['segments_info']]) if self.transforms is not None:", "} img_folder, ann_folder, ann_file = PATHS[image_set] img_folder_path = root /", "torch.tensor(boxes, dtype=torch.int64) labels = torch.tensor(labels, dtype=torch.int64) iscrowd = torch.tensor(iscrowd) area", "boxes target['size'] = torch.as_tensor([int(h), int(w)]) target['orig_size'] = torch.as_tensor([int(h), int(w)]) target['iscrowd']", "img_folder, ann_folder, ann_file, transforms=None, return_masks=True): with open(ann_file, \"r\") as f:", "in ann_info[\"segments_info\"]]) masks = masks == ids[:, None, None] masks", "ids[:, None, None] masks = torch.as_tensor(masks, dtype=torch.uint8) # labels =", "__getitem__(self, idx): try: ann_info = ( self.coco[\"annotations\"][idx] if \"annotations\" in", "transforms self.return_masks = return_masks def __getitem__(self, idx): try: ann_info =", "= torch.tensor(labels, dtype=torch.int64) iscrowd = torch.tensor(iscrowd) area = torch.tensor(area) boxes", "self.return_masks = return_masks def __getitem__(self, idx): try: ann_info = (", "img_info['height'] width = img_info['width'] return height, width def build(image_set, args):", "/ img_folder ann_folder_path = root / ann_folder ann_file = root", "def __init__(self, img_folder, ann_folder, ann_file, transforms=None, return_masks=True): with open(ann_file, \"r\")", "pathlib import Path import numpy as np import torch from", "ys, (xs + w), (ys + h)] return torch.stack(b, dim=-1)", "/ ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path = 
Path(self.ann_folder) / ann_info[\"file_name\"] img =", "just take the one from json info available # target[\"boxes\"]", "target['area'] = area # if \"segments_info\" in ann_info: # for", "try: ann_info = ( self.coco[\"annotations\"][idx] if \"annotations\" in self.coco else", "= torch.tensor( # [ann[\"category_id\"] for ann in ann_info[\"segments_info\"]], # dtype=torch.int64,", "( root.exists() ), f\"provided Panoptic path {root} does not exist\"", "area class ConstructionPanoptic: def __init__(self, img_folder, ann_folder, ann_file, transforms=None, return_masks=True):", "torch.as_tensor([int(h), int(w)]) target['orig_size'] = torch.as_tensor([int(h), int(w)]) target['iscrowd'] = iscrowd target['area']", "= ann_file self.transforms = transforms self.return_masks = return_masks def __getitem__(self,", "= masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes target['size'] = torch.as_tensor([int(h), int(w)]) target['orig_size']", "ann_file = root / ann_file dataset = ConstructionPanoptic( img_folder_path, ann_folder_path,", "ann_info: # for name in ['iscrowd', 'area']: # target[name] =", "aligned with 'annotations' # i.e., in alphabetical order self.coco[\"images\"] =", "Reserved import json from pathlib import Path import numpy as", "mode = \"panoptic\" PATHS = { \"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"),", "width = img_info['width'] return height, width def build(image_set, args): root", "except Exception as e: logging.error(ann_info) raise e def __len__(self): return", "labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes) == 0 and len(labels) == 0:", "if \"annotations\" in self.coco else self.coco[\"images\"][idx] ) img_path = Path(self.img_folder)", "self.coco[\"annotations\"]): assert img[\"file_name\"][:-4] == ann[\"file_name\"][:-4] self.img_folder = img_folder self.ann_folder =", "f\"provided Panoptic path {root} does not exist\" mode = \"panoptic\"", "return height, width def build(image_set, 
args): root = Path(args.data_path) assert", "ann_folder, ann_file = PATHS[image_set] img_folder_path = root / img_folder ann_folder_path", "Path import numpy as np import torch from PIL import", "for ann in ann_info[\"segments_info\"]]) masks = masks == ids[:, None,", "f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder, ann_folder, ann_file = PATHS[image_set] img_folder_path =", "for ann in ann_info['segments_info']]) if self.transforms is not None: img,", "ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path = Path(self.ann_folder) / ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\")", "Path(self.img_folder) / ann_info[\"file_name\"].replace(\".png\", \".jpg\") ann_path = Path(self.ann_folder) / ann_info[\"file_name\"] img", "# target[\"boxes\"] = masks_to_boxes(ann_info[\"segments_info\"]) target[\"boxes\"] = boxes target['size'] = torch.as_tensor([int(h),", "area.append(4) labels.append(ann[\"category_id\"]) iscrowd.append(ann['iscrowd']) if len(boxes) == 0 and len(labels) ==", "target['image_id'] = torch.tensor([ann_info['image_id'] if \"image_id\" in ann_info else ann_info[\"id\"]]) if", "rgb2id # from util.box_ops import masks_to_boxes from .construction import make_construction_transforms", "h = img.size if \"segments_info\" in ann_info: masks = np.asarray(Image.open(ann_path),", "in ann_info[\"segments_info\"]], # dtype=torch.int64, # ) target = {} target['image_id']", "Inc. and its affiliates. 
All Rights Reserved import json from", "not None: img, target = self.transforms(img, target) return img, target", "not exist\" mode = \"panoptic\" PATHS = { \"train\": (\"images\",", "area.append(4) iscrowd.append(0) boxes = torch.tensor(boxes, dtype=torch.int64) labels = torch.tensor(labels, dtype=torch.int64)", "/ ann_info[\"file_name\"] img = Image.open(img_path).convert(\"RGB\") w, h = img.size if", "img_info['width'] return height, width def build(image_set, args): root = Path(args.data_path)", "b = [xs, ys, (xs + w), (ys + h)]", "= root / ann_folder ann_file = root / ann_file dataset", "iscrowd, area class ConstructionPanoptic: def __init__(self, img_folder, ann_folder, ann_file, transforms=None,", "height, width def build(image_set, args): root = Path(args.data_path) assert (", "root / ann_folder ann_file = root / ann_file dataset =", "ann_info[\"segments_info\"]], # dtype=torch.int64, # ) target = {} target['image_id'] =", "Rights Reserved import json from pathlib import Path import numpy", "target['size'] = torch.as_tensor([int(h), int(w)]) target['orig_size'] = torch.as_tensor([int(h), int(w)]) target['iscrowd'] =", "= PATHS[image_set] img_folder_path = root / img_folder ann_folder_path = root", "import make_construction_transforms import logging def box_xywh_to_xyxy(x): xs, ys, w, h", "target['masks'] = masks boxes, labels, iscrowd, area = masks_to_boxes(ann_info[\"segments_info\"]) target['labels']", "(\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder, ann_folder,", "\"r\") as f: self.coco = json.load(f) # sort 'images' field", "= np.asarray(Image.open(ann_path), dtype=np.uint32) masks = rgb2id(masks) ids = np.array([ann[\"id\"] for", "segments: if len(ann[\"bbox\"]) == 4: boxes.append(ann[\"bbox\"]) area.append(ann['area']) else: boxes.append([0, 0,", "self.transforms = transforms self.return_masks = return_masks def __getitem__(self, idx): try:", "['iscrowd', 'area']: # target[name] 
= torch.tensor([ann[name] for ann in ann_info['segments_info']])", "import Image from panopticapi.utils import rgb2id # from util.box_ops import", "rgb2id(masks) ids = np.array([ann[\"id\"] for ann in ann_info[\"segments_info\"]]) masks =", "ann_info = ( self.coco[\"annotations\"][idx] if \"annotations\" in self.coco else self.coco[\"images\"][idx]", "h = x.unbind(-1) b = [xs, ys, (xs + w),", "= [] labels = [] iscrowd = [] area =", "its affiliates. All Rights Reserved import json from pathlib import", "and len(labels) == 0: boxes.append([0, 0, 2, 2]) labels.append(1) area.append(4)", "== ann[\"file_name\"][:-4] self.img_folder = img_folder self.ann_folder = ann_folder self.ann_file =", "are aligned with 'annotations' # i.e., in alphabetical order self.coco[\"images\"]", "target['iscrowd'] = iscrowd target['area'] = area # if \"segments_info\" in", "dtype=np.uint32) masks = rgb2id(masks) ids = np.array([ann[\"id\"] for ann in", "\"train\": (\"images\", f\"{mode}\", f\"{mode}.json\"), \"val\": (\"images\", f\"val_{mode}\", f\"val_{mode}.json\"), } img_folder,", "target = self.transforms(img, target) return img, target except Exception as", "torch.tensor( # [ann[\"category_id\"] for ann in ann_info[\"segments_info\"]], # dtype=torch.int64, #" ]
[ "middle = left + (right - left) // 2 if", "equals to nth-item \"\"\" if len(array) <= index: return index", "search recursively here if left is None and right is", "0, len(array) - 1 if len(array) == 0: return None", "O(n) because you are returning the function continuously until index", "return middle elif item > array[middle]: left = middle +", "1, right) else: return binary_search_recursive(array, item, left, middle - 1)", "item): \"\"\"return the index of item in sorted array or", "TIME AND SPACE COMPLEXITY!!!!! \"\"\" def linear_search(array, item): \"\"\"return the", "+ 1 else: right = middle - 1 return None", "def binary_search_recursive(array, item, left=None, right=None): \"\"\"Time Complexity: O(log*n) Space Complexity:", "Complexity: O(log*n) Space Complexity: 0(log*n) recursion call stack space\"\"\" #", "FUNCTIONS WITH TIME AND SPACE COMPLEXITY!!!!! \"\"\" def linear_search(array, item):", "the function continuously until index equals to nth-item \"\"\" if", "returning the function continuously until index equals to nth-item \"\"\"", "are returning the function continuously until index equals to nth-item", "> array[middle]: left = middle + 1 else: right =", "Space Complexity: 0(log*n) recursion call stack space\"\"\" # TODO: implement", "0(log*n) recursion call stack space\"\"\" # TODO: implement binary search", "until item is found for index, value in enumerate(array): #O(n)", "return index # found O(1) return None # not found", "left, right = 0, len(array) - 1 middle = left", "== 0: return None while left <= right: middle =", "Complexity: 0(log*n) recursion call stack space\"\"\" # TODO: implement binary", "binary_search(array, item): \"\"\"return the index of item in sorted array", "#!python \"\"\" ANNOTATE FUNCTIONS WITH TIME AND SPACE COMPLEXITY!!!!! 
\"\"\"", "over all array values until item is found for index,", "are n amount of items\"\"\" # loop over all array", "while left <= right: middle = left + (right -", "return index if array[index] == item: return index else: return", "O(1) def linear_search_recursive(array, item, index=0): \"\"\"Time complexity: O(n) because you", "continuously until index equals to nth-item \"\"\" if len(array) <=", "None # not found O(1) def linear_search_recursive(array, item, index=0): \"\"\"Time", "# return binary_search_recursive(array, item) def binary_search_iterative(array, item): \"\"\"Time Complexity: O(log*n)", "# TODO: implement binary search recursively here if left is", "(right - left) // 2 if item == array[middle]: return", "item == value: #O(1) return index # found O(1) return", "index=0): \"\"\"Time complexity: O(n) because you are returning the function", "COMPLEXITY!!!!! \"\"\" def linear_search(array, item): \"\"\"return the first index of", "O(n) because you iterate through n amount of items in", "array[middle]: return middle elif item > array[middle]: left = middle", "first index of item in array or None if item", "if item is not found\"\"\" return binary_search_iterative(array, item) # return", "\"\"\"Time complexity: O(n) because you are returning the function continuously", "else: return linear_search_recursive(array, item, index + 1) def binary_search(array, item):", "are constantly dividing the length of array by 2 until", "array[middle]: left = middle + 1 else: right = middle", "1 return None def binary_search_recursive(array, item, left=None, right=None): \"\"\"Time Complexity:", "space\"\"\" # TODO: implement binary search recursively here if left", "found\"\"\" return linear_search_iterative(array, item) # return linear_search_recursive(array, item) def linear_search_iterative(array,", "right = 0, len(array) - 1 if len(array) == 0:", "index equals to nth-item \"\"\" if len(array) <= index: return", "left) // 2 if item == array[middle]: return middle 
elif", "array[middle]: return binary_search_recursive(array, item, middle + 1, right) else: return", "until index equals to nth-item \"\"\" if len(array) <= index:", "\"\"\" ANNOTATE FUNCTIONS WITH TIME AND SPACE COMPLEXITY!!!!! \"\"\" def", "function continuously until index equals to nth-item \"\"\" if len(array)", "binary_search_recursive(array, item, middle + 1, right) else: return binary_search_recursive(array, item,", "left + (right - left) // 2 if item ==", "item) def linear_search_iterative(array, item): \"\"\"Time complexity: O(n) because you iterate", "array[middle] == item: return middle elif item > array[middle]: return", "#O(1) return index # found O(1) return None # not", "recursively here if left is None and right is None:", "item > array[middle]: return binary_search_recursive(array, item, middle + 1, right)", "dividing the length of array by 2 until array length", "constantly dividing the length of array by 2 until array", "middle elif item > array[middle]: left = middle + 1", "array[index] == item: return index else: return linear_search_recursive(array, item, index", "\"\"\"Time Complexity: O(log*n) because you are constantly dividing the length", "(right - left) // 2 if left > right: return", "return index else: return linear_search_recursive(array, item, index + 1) def", "len(array) - 1 if len(array) == 0: return None while", "length of array by 2 until array length is 1", "loop over all array values until item is found for", "stack space\"\"\" # TODO: implement binary search recursively here if", "1) def binary_search(array, item): \"\"\"return the index of item in", "if array[middle] == item: return middle elif item > array[middle]:", "value: #O(1) return index # found O(1) return None #", "the index of item in sorted array or None if", "Complexity: O(log*n) because you are constantly dividing the length of", "item) def binary_search_iterative(array, item): \"\"\"Time Complexity: O(log*n) because you are", "there are n amount of items\"\"\" # 
loop over all", "left + (right - left) // 2 if left >", "not found O(1) def linear_search_recursive(array, item, index=0): \"\"\"Time complexity: O(n)", "index + 1) def binary_search(array, item): \"\"\"return the index of", "return None if array[middle] == item: return middle elif item", "left=None, right=None): \"\"\"Time Complexity: O(log*n) Space Complexity: 0(log*n) recursion call", "else: right = middle - 1 return None def binary_search_recursive(array,", "item, index + 1) def binary_search(array, item): \"\"\"return the index", "or None if item is not found\"\"\" return linear_search_iterative(array, item)", "found O(1) return None # not found O(1) def linear_search_recursive(array,", "def binary_search_iterative(array, item): \"\"\"Time Complexity: O(log*n) because you are constantly", "index if array[index] == item: return index else: return linear_search_recursive(array,", "amount of items\"\"\" # loop over all array values until", "here if left is None and right is None: left,", "+ (right - left) // 2 if left > right:", "you are returning the function continuously until index equals to", "item): \"\"\"Time complexity: O(n) because you iterate through n amount", "- left) // 2 if left > right: return None", "item is not found\"\"\" return binary_search_iterative(array, item) # return binary_search_recursive(array,", "right=None): \"\"\"Time Complexity: O(log*n) Space Complexity: 0(log*n) recursion call stack", "middle + 1, right) else: return binary_search_recursive(array, item, left, middle", "array length is 1 Space Complexity: O(1) \"\"\" left, right", "in array Space Complexity: O(n) because there are n amount", "iterate through n amount of items in array Space Complexity:", "because you iterate through n amount of items in array", "return linear_search_iterative(array, item) # return linear_search_recursive(array, item) def linear_search_iterative(array, item):", "return None # not found O(1) def linear_search_recursive(array, item, index=0):", "None 
if item is not found\"\"\" return binary_search_iterative(array, item) #", "== item: return middle elif item > array[middle]: return binary_search_recursive(array,", "linear_search_recursive(array, item, index + 1) def binary_search(array, item): \"\"\"return the", "of item in sorted array or None if item is", "// 2 if left > right: return None if array[middle]", "def binary_search(array, item): \"\"\"return the index of item in sorted", "def linear_search_iterative(array, item): \"\"\"Time complexity: O(n) because you iterate through", "1 if len(array) == 0: return None while left <=", "the length of array by 2 until array length is", "left <= right: middle = left + (right - left)", "sorted array or None if item is not found\"\"\" return", "index: return index if array[index] == item: return index else:", "of items\"\"\" # loop over all array values until item", "- 1 middle = left + (right - left) //", "n amount of items\"\"\" # loop over all array values", "1 middle = left + (right - left) // 2", "O(1) \"\"\" left, right = 0, len(array) - 1 if", "\"\"\"return the first index of item in array or None", "of items in array Space Complexity: O(n) because there are", "\"\"\"return the index of item in sorted array or None", "SPACE COMPLEXITY!!!!! 
\"\"\" def linear_search(array, item): \"\"\"return the first index", "for index, value in enumerate(array): #O(n) if item == value:", "O(n) because there are n amount of items\"\"\" # loop", "binary_search_recursive(array, item) def binary_search_iterative(array, item): \"\"\"Time Complexity: O(log*n) because you", "- 1 return None def binary_search_recursive(array, item, left=None, right=None): \"\"\"Time", "+ 1, right) else: return binary_search_recursive(array, item, left, middle -", "item) # return linear_search_recursive(array, item) def linear_search_iterative(array, item): \"\"\"Time complexity:", "in sorted array or None if item is not found\"\"\"", "the first index of item in array or None if", "found for index, value in enumerate(array): #O(n) if item ==", "0: return None while left <= right: middle = left", "return binary_search_recursive(array, item, middle + 1, right) else: return binary_search_recursive(array,", "array by 2 until array length is 1 Space Complexity:", "length is 1 Space Complexity: O(1) \"\"\" left, right =", "ANNOTATE FUNCTIONS WITH TIME AND SPACE COMPLEXITY!!!!! 
\"\"\" def linear_search(array,", "# found O(1) return None # not found O(1) def", "middle + 1 else: right = middle - 1 return", "index, value in enumerate(array): #O(n) if item == value: #O(1)", "middle - 1 return None def binary_search_recursive(array, item, left=None, right=None):", "item: return middle elif item > array[middle]: return binary_search_recursive(array, item,", "right: middle = left + (right - left) // 2", "\"\"\" def linear_search(array, item): \"\"\"return the first index of item", "because you are returning the function continuously until index equals", "item in sorted array or None if item is not", "// 2 if item == array[middle]: return middle elif item", "= middle - 1 return None def binary_search_recursive(array, item, left=None,", "return None while left <= right: middle = left +", "= left + (right - left) // 2 if item", "items in array Space Complexity: O(n) because there are n", "left, right = 0, len(array) - 1 if len(array) ==", "left) // 2 if left > right: return None if", "None if array[middle] == item: return middle elif item >", "found\"\"\" return binary_search_iterative(array, item) # return binary_search_recursive(array, item) def binary_search_iterative(array,", "+ (right - left) // 2 if item == array[middle]:", "\"\"\"Time Complexity: O(log*n) Space Complexity: 0(log*n) recursion call stack space\"\"\"", "item == array[middle]: return middle elif item > array[middle]: left", "in array or None if item is not found\"\"\" return", "2 until array length is 1 Space Complexity: O(1) \"\"\"", "right = middle - 1 return None def binary_search_recursive(array, item,", "is None and right is None: left, right = 0,", "def linear_search_recursive(array, item, index=0): \"\"\"Time complexity: O(n) because you are", "index of item in sorted array or None if item", "= 0, len(array) - 1 if len(array) == 0: return", "amount of items in array Space Complexity: O(n) because there", "return binary_search_iterative(array, item) # return 
binary_search_recursive(array, item) def binary_search_iterative(array, item):", "of item in array or None if item is not", "right is None: left, right = 0, len(array) - 1", "by 2 until array length is 1 Space Complexity: O(1)", "of array by 2 until array length is 1 Space", "if array[index] == item: return index else: return linear_search_recursive(array, item,", "array or None if item is not found\"\"\" return binary_search_iterative(array,", "right: return None if array[middle] == item: return middle elif", "item is not found\"\"\" return linear_search_iterative(array, item) # return linear_search_recursive(array,", "you are constantly dividing the length of array by 2", "if item == value: #O(1) return index # found O(1)", "binary_search_iterative(array, item): \"\"\"Time Complexity: O(log*n) because you are constantly dividing", "index else: return linear_search_recursive(array, item, index + 1) def binary_search(array,", "item) # return binary_search_recursive(array, item) def binary_search_iterative(array, item): \"\"\"Time Complexity:", "not found\"\"\" return linear_search_iterative(array, item) # return linear_search_recursive(array, item) def", "left > right: return None if array[middle] == item: return", "or None if item is not found\"\"\" return binary_search_iterative(array, item)", "== value: #O(1) return index # found O(1) return None", "return binary_search_recursive(array, item) def binary_search_iterative(array, item): \"\"\"Time Complexity: O(log*n) because", "return linear_search_recursive(array, item) def linear_search_iterative(array, item): \"\"\"Time complexity: O(n) because", "binary search recursively here if left is None and right", "item, left=None, right=None): \"\"\"Time Complexity: O(log*n) Space Complexity: 0(log*n) recursion", "None: left, right = 0, len(array) - 1 middle =", "to nth-item \"\"\" if len(array) <= index: return index if", "return None def binary_search_recursive(array, item, left=None, right=None): \"\"\"Time 
Complexity: O(log*n)", "# loop over all array values until item is found", "is found for index, value in enumerate(array): #O(n) if item", "#O(n) if item == value: #O(1) return index # found", "return linear_search_recursive(array, item, index + 1) def binary_search(array, item): \"\"\"return", "1 Space Complexity: O(1) \"\"\" left, right = 0, len(array)", "> array[middle]: return binary_search_recursive(array, item, middle + 1, right) else:", "Complexity: O(n) because there are n amount of items\"\"\" #", "nth-item \"\"\" if len(array) <= index: return index if array[index]", "array Space Complexity: O(n) because there are n amount of", "len(array) <= index: return index if array[index] == item: return", "+ 1) def binary_search(array, item): \"\"\"return the index of item", "len(array) - 1 middle = left + (right - left)", "all array values until item is found for index, value", "because there are n amount of items\"\"\" # loop over", "None def binary_search_recursive(array, item, left=None, right=None): \"\"\"Time Complexity: O(log*n) Space", "Space Complexity: O(1) \"\"\" left, right = 0, len(array) -", "linear_search(array, item): \"\"\"return the first index of item in array", "if item is not found\"\"\" return linear_search_iterative(array, item) # return", "O(1) return None # not found O(1) def linear_search_recursive(array, item,", "WITH TIME AND SPACE COMPLEXITY!!!!! 
\"\"\" def linear_search(array, item): \"\"\"return", "\"\"\" if len(array) <= index: return index if array[index] ==", "= left + (right - left) // 2 if left", "> right: return None if array[middle] == item: return middle", "left is None and right is None: left, right =", "right = 0, len(array) - 1 middle = left +", "item, middle + 1, right) else: return binary_search_recursive(array, item, left,", "1 else: right = middle - 1 return None def", "O(log*n) Space Complexity: 0(log*n) recursion call stack space\"\"\" # TODO:", "values until item is found for index, value in enumerate(array):", "len(array) == 0: return None while left <= right: middle", "elif item > array[middle]: left = middle + 1 else:", "Space Complexity: O(n) because there are n amount of items\"\"\"", "through n amount of items in array Space Complexity: O(n)", "\"\"\" left, right = 0, len(array) - 1 if len(array)", "None if item is not found\"\"\" return linear_search_iterative(array, item) #", "enumerate(array): #O(n) if item == value: #O(1) return index #", "item: return index else: return linear_search_recursive(array, item, index + 1)", "if len(array) <= index: return index if array[index] == item:", "in enumerate(array): #O(n) if item == value: #O(1) return index", "is not found\"\"\" return binary_search_iterative(array, item) # return binary_search_recursive(array, item)", "array or None if item is not found\"\"\" return linear_search_iterative(array,", "complexity: O(n) because you are returning the function continuously until", "<= right: middle = left + (right - left) //", "if left > right: return None if array[middle] == item:", "is not found\"\"\" return linear_search_iterative(array, item) # return linear_search_recursive(array, item)", "AND SPACE COMPLEXITY!!!!! 
\"\"\" def linear_search(array, item): \"\"\"return the first", "if len(array) == 0: return None while left <= right:", "binary_search_recursive(array, item, left=None, right=None): \"\"\"Time Complexity: O(log*n) Space Complexity: 0(log*n)", "array values until item is found for index, value in", "until array length is 1 Space Complexity: O(1) \"\"\" left,", "middle elif item > array[middle]: return binary_search_recursive(array, item, middle +", "item): \"\"\"Time Complexity: O(log*n) because you are constantly dividing the", "= 0, len(array) - 1 middle = left + (right", "def linear_search(array, item): \"\"\"return the first index of item in", "TODO: implement binary search recursively here if left is None", "# return linear_search_recursive(array, item) def linear_search_iterative(array, item): \"\"\"Time complexity: O(n)", "linear_search_iterative(array, item) # return linear_search_recursive(array, item) def linear_search_iterative(array, item): \"\"\"Time", "found O(1) def linear_search_recursive(array, item, index=0): \"\"\"Time complexity: O(n) because", "- 1 if len(array) == 0: return None while left", "left = middle + 1 else: right = middle -", "None and right is None: left, right = 0, len(array)", "2 if left > right: return None if array[middle] ==", "<= index: return index if array[index] == item: return index", "index of item in array or None if item is", "= middle + 1 else: right = middle - 1", "complexity: O(n) because you iterate through n amount of items", "None while left <= right: middle = left + (right", "item): \"\"\"return the first index of item in array or", "index # found O(1) return None # not found O(1)", "n amount of items in array Space Complexity: O(n) because", "linear_search_recursive(array, item, index=0): \"\"\"Time complexity: O(n) because you are returning", "implement binary search recursively here if left is None and", "2 if item == array[middle]: return middle elif item >", "recursion call stack space\"\"\" # TODO: implement 
binary search recursively", "and right is None: left, right = 0, len(array) -", "0, len(array) - 1 middle = left + (right -", "is 1 Space Complexity: O(1) \"\"\" left, right = 0,", "you iterate through n amount of items in array Space", "item, index=0): \"\"\"Time complexity: O(n) because you are returning the", "\"\"\"Time complexity: O(n) because you iterate through n amount of", "not found\"\"\" return binary_search_iterative(array, item) # return binary_search_recursive(array, item) def", "Complexity: O(1) \"\"\" left, right = 0, len(array) - 1", "- left) // 2 if item == array[middle]: return middle", "linear_search_iterative(array, item): \"\"\"Time complexity: O(n) because you iterate through n", "item is found for index, value in enumerate(array): #O(n) if", "if item == array[middle]: return middle elif item > array[middle]:", "== array[middle]: return middle elif item > array[middle]: left =", "== item: return index else: return linear_search_recursive(array, item, index +", "elif item > array[middle]: return binary_search_recursive(array, item, middle + 1,", "return middle elif item > array[middle]: return binary_search_recursive(array, item, middle", "linear_search_recursive(array, item) def linear_search_iterative(array, item): \"\"\"Time complexity: O(n) because you", "item > array[middle]: left = middle + 1 else: right", "# not found O(1) def linear_search_recursive(array, item, index=0): \"\"\"Time complexity:", "items\"\"\" # loop over all array values until item is", "because you are constantly dividing the length of array by", "O(log*n) because you are constantly dividing the length of array", "call stack space\"\"\" # TODO: implement binary search recursively here", "if left is None and right is None: left, right", "item in array or None if item is not found\"\"\"", "binary_search_iterative(array, item) # return binary_search_recursive(array, item) def binary_search_iterative(array, item): \"\"\"Time", "value in enumerate(array): #O(n) if item == 
value: #O(1) return", "is None: left, right = 0, len(array) - 1 middle" ]
[ "b, c, dt)) RM.commit() def update_memory(self, a, b): cursor.execute('''UPDATE memory", "into memory (concept, location, person,DATE) values (?, ?, ?, ?)\",", "= ? WHERE concept = ? ''', (b, a)) RM.commit()", "from pattern.en import pprint from pattern.en import parsetree from pattern.en", "(b, a)) RM.commit() def search_memory(self, a): cursor.execute('''SELECT concept,location, person FROM", "what/how is 'concept' self.reference = 'none' # unused self.locality =", "3rd-person, singular self.isProperNoun = False # True if proper noun:", "# what/how is 'concept' self.reference = 'none' # unused self.locality", "a, b): c = '3sg' dt = datetime.now() RM.execute(\"insert into", "single concept class conceptClass: def __init__(self, state='none', locality='none'): self.state =", "holds raw input and memories in parse taged columns '''", "location = ? WHERE concept = ? ''', (b, a))", "memory def known(self, c): cursor.execute('''SELECT concept, location FROM memory WHERE", "conceptClass: def __init__(self, state='none', locality='none'): self.state = state # what/how", "datetime from pattern.en import parse from pattern.en import pprint from", "= conceptClass() if c in self.person: self.concepts[c].person = self.person[c] else:", "def search_profile(self, a): cursor.execute('''SELECT value FROM profile WHERE item =?''',", "memory WHERE concept =?''', (c,)) user = cursor.fetchone() # if", "# Information about a single concept class conceptClass: def __init__(self,", "c): cursor.execute('''SELECT concept, location FROM memory WHERE concept =?''', (c,))", "== 'None': return user def add_memory(self, a, b): c =", "add oncept to raw_input table in robbie_memory # x= #", "(?, ?)\",(c, dt)) # RM.commit() self.concepts[c] = conceptClass() if c", "= RM.cursor() # Information about a single concept class conceptClass:", "cursor.fetchone() return user def search_profile(self, a): cursor.execute('''SELECT value FROM profile", "sqlite3.connect(dir +'robbie_memory.sqlite') #RM = 
sqlite3.connect(dir + '/data/robbie_memory.db') cursor = RM.cursor()", "= datetime.now() # RM.execute(\"insert into RAW_INPUT (RAW, DATE) values (?,", "'3sg' dt = datetime.now() RM.execute(\"insert into memory (concept, location, person,DATE)", "self.locality = locality # where is 'concept' self.person = '3sg'", "memory class stored in sqlite data base holds raw input", "# where is 'concept' self.person = '3sg' # e.g. a", "= sqlite3.connect(dir + '/data/robbie_memory.db') cursor = RM.cursor() # Information about", "import sys import re import sqlite3 import os from datetime", "{'1sg': 'my', '2sg': 'your', '3sg': 'its' } # Add a", "import pprint from pattern.en import parsetree from pattern.en import wordnet", "state='none', locality='none'): self.state = state # what/how is 'concept' self.reference", "Robbie memory class. Collection of concepts class memoryClass(): def __init__(self):", "?)\", (a, b, c, dt)) RM.commit() def update_memory(self, a, b):", "from datetime import date, datetime from pattern.en import parse from", "RM = sqlite3.connect(dir +'robbie_memory.sqlite') #RM = sqlite3.connect(dir + '/data/robbie_memory.db') cursor", "concept,location, person FROM memory WHERE concept =?''', (a,)) user =", "RM.commit() def search_memory(self, a): cursor.execute('''SELECT concept,location, person FROM memory WHERE", "''' memory class stored in sqlite data base holds raw", "'2sg': 'your', '3sg': 'its' } # Add a concept to", "SET location = ? WHERE concept = ? 
''', (b,", "from pattern.en import conjugate, lemma, lexeme #dir = os.path.dirname(os.path.abspath(__file__)) dir", "taged columns ''' import sys import re import sqlite3 import", "= state # what/how is 'concept' self.reference = 'none' #", "FROM memory WHERE concept =?''', (c,)) user = cursor.fetchone() #", "date, datetime from pattern.en import parse from pattern.en import pprint", "FROM profile WHERE item =?''', (a,)) user = cursor.fetchone() return", "parsetree from pattern.en import wordnet from pattern.en import pluralize, singularize", "'3sg' # e.g. a thing is 3rd-person, singular self.isProperNoun =", "class conceptClass: def __init__(self, state='none', locality='none'): self.state = state #", "'my', '2sg': 'your', '3sg': 'its' } # Add a concept", "self.person = {'I': '1sg', 'you': '2sg' } self.posessivePronouns = {'1sg':", "= os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir +'robbie_memory.sqlite') #RM", "pattern.en import parsetree from pattern.en import wordnet from pattern.en import", "c = '3sg' dt = datetime.now() RM.execute(\"insert into memory (concept,", "(a,)) user = cursor.fetchone() return user def Dump(self): return (self.concepts.state)", "lexeme #dir = os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir", "} self.posessivePronouns = {'1sg': 'my', '2sg': 'your', '3sg': 'its' }", "thing is 3rd-person, singular self.isProperNoun = False # True if", "import conjugate, lemma, lexeme #dir = os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/'", "def __init__(self, state='none', locality='none'): self.state = state # what/how is", "is 'concept' self.person = '3sg' # e.g. a thing is", "re import sqlite3 import os from datetime import date, datetime", "self.isProperNoun = False # True if proper noun: e.g. 
Robert", "__init__(self, state='none', locality='none'): self.state = state # what/how is 'concept'", "Add a concept to memory def add(self, c): # add", "raw_input table in robbie_memory # x= # dt = datetime.now()", "# unused self.locality = locality # where is 'concept' self.person", "= ? ''', (b, a)) RM.commit() def search_memory(self, a): cursor.execute('''SELECT", "data base holds raw input and memories in parse taged", "import os from datetime import date, datetime from pattern.en import", "location FROM memory WHERE concept =?''', (c,)) user = cursor.fetchone()", "dt)) # RM.commit() self.concepts[c] = conceptClass() if c in self.person:", "table in robbie_memory # x= # dt = datetime.now() #", "concepts class memoryClass(): def __init__(self): self.concepts = {} self.person =", "unused self.locality = locality # where is 'concept' self.person =", "from pattern.en import pluralize, singularize from pattern.en import conjugate, lemma,", "'concept' self.reference = 'none' # unused self.locality = locality #", "'none' # unused self.locality = locality # where is 'concept'", "39, 'color' = 'blue' # Robbie memory class. Collection of", "values (?, ?, ?, ?)\", (a, b, c, dt)) RM.commit()", "= '3sg' # e.g. a thing is 3rd-person, singular self.isProperNoun", "# RM.commit() self.concepts[c] = conceptClass() if c in self.person: self.concepts[c].person", "'3sg' # Return True if concept 'c' (string) is in", "= 'none' # unused self.locality = locality # where is", "# True if proper noun: e.g. Robert self.properties = {}", "#dir = os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir +'robbie_memory.sqlite')", "into RAW_INPUT (RAW, DATE) values (?, ?)\",(c, dt)) # RM.commit()", "is 'concept' self.reference = 'none' # unused self.locality = locality", "? WHERE concept = ? 
''', (b, a)) RM.commit() def", "''', (b, a)) RM.commit() def search_memory(self, a): cursor.execute('''SELECT concept,location, person", "#!/usr/bin/python ''' memory class stored in sqlite data base holds", "{} # Dict of custom properties, e.g. 'age' = 39,", "'color' = 'blue' # Robbie memory class. Collection of concepts", "Robert self.properties = {} # Dict of custom properties, e.g.", "= locality # where is 'concept' self.person = '3sg' #", "is 3rd-person, singular self.isProperNoun = False # True if proper", "memory def add(self, c): # add oncept to raw_input table", "self.state = state # what/how is 'concept' self.reference = 'none'", "if user == 'None': return user def add_memory(self, a, b):", "= False # True if proper noun: e.g. Robert self.properties", "person FROM memory WHERE concept =?''', (a,)) user = cursor.fetchone()", "class. Collection of concepts class memoryClass(): def __init__(self): self.concepts =", "memories in parse taged columns ''' import sys import re", "(string) is in memory def known(self, c): cursor.execute('''SELECT concept, location", "RM.commit() def update_memory(self, a, b): cursor.execute('''UPDATE memory SET location =", "'you': '2sg' } self.posessivePronouns = {'1sg': 'my', '2sg': 'your', '3sg':", "c): # add oncept to raw_input table in robbie_memory #", "in robbie_memory # x= # dt = datetime.now() # RM.execute(\"insert", "memory WHERE concept =?''', (a,)) user = cursor.fetchone() return user", "add_memory(self, a, b): c = '3sg' dt = datetime.now() RM.execute(\"insert", "sqlite data base holds raw input and memories in parse", "from pattern.en import parsetree from pattern.en import wordnet from pattern.en", "sqlite3.connect(dir + '/data/robbie_memory.db') cursor = RM.cursor() # Information about a", "memory class. 
Collection of concepts class memoryClass(): def __init__(self): self.concepts", "wordnet from pattern.en import pluralize, singularize from pattern.en import conjugate,", "pprint from pattern.en import parsetree from pattern.en import wordnet from", "lemma, lexeme #dir = os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM =", "user def add_memory(self, a, b): c = '3sg' dt =", "parse taged columns ''' import sys import re import sqlite3", "# add oncept to raw_input table in robbie_memory # x=", "self.properties = {} # Dict of custom properties, e.g. 'age'", "(a,)) user = cursor.fetchone() return user def search_profile(self, a): cursor.execute('''SELECT", "user = cursor.fetchone() return user def search_profile(self, a): cursor.execute('''SELECT value", "Collection of concepts class memoryClass(): def __init__(self): self.concepts = {}", "+ '/data/robbie_memory.db') cursor = RM.cursor() # Information about a single", "# e.g. a thing is 3rd-person, singular self.isProperNoun = False", "dt)) RM.commit() def update_memory(self, a, b): cursor.execute('''UPDATE memory SET location", "value FROM profile WHERE item =?''', (a,)) user = cursor.fetchone()", "# Add a concept to memory def add(self, c): #", "'3sg': 'its' } # Add a concept to memory def", "=?''', (a,)) user = cursor.fetchone() return user def Dump(self): return", "RAW_INPUT (RAW, DATE) values (?, ?)\",(c, dt)) # RM.commit() self.concepts[c]", "user == 'None': return user def add_memory(self, a, b): c", "# Return True if concept 'c' (string) is in memory", "update_memory(self, a, b): cursor.execute('''UPDATE memory SET location = ? WHERE", "?, ?)\", (a, b, c, dt)) RM.commit() def update_memory(self, a,", "a): cursor.execute('''SELECT concept,location, person FROM memory WHERE concept =?''', (a,))", "in memory def known(self, c): cursor.execute('''SELECT concept, location FROM memory", "b): cursor.execute('''UPDATE memory SET location = ? 
WHERE concept =", "WHERE concept =?''', (a,)) user = cursor.fetchone() return user def", "(concept, location, person,DATE) values (?, ?, ?, ?)\", (a, b,", "__init__(self): self.concepts = {} self.person = {'I': '1sg', 'you': '2sg'", "'1sg', 'you': '2sg' } self.posessivePronouns = {'1sg': 'my', '2sg': 'your',", "sqlite3 import os from datetime import date, datetime from pattern.en", "= {} self.person = {'I': '1sg', 'you': '2sg' } self.posessivePronouns", "FROM memory WHERE concept =?''', (a,)) user = cursor.fetchone() return", "RM.commit() self.concepts[c] = conceptClass() if c in self.person: self.concepts[c].person =", "memoryClass(): def __init__(self): self.concepts = {} self.person = {'I': '1sg',", "True if proper noun: e.g. Robert self.properties = {} #", "'blue' # Robbie memory class. Collection of concepts class memoryClass():", "state # what/how is 'concept' self.reference = 'none' # unused", "values (?, ?)\",(c, dt)) # RM.commit() self.concepts[c] = conceptClass() if", "concept class conceptClass: def __init__(self, state='none', locality='none'): self.state = state", "e.g. 'age' = 39, 'color' = 'blue' # Robbie memory", "pluralize, singularize from pattern.en import conjugate, lemma, lexeme #dir =", "''' import sys import re import sqlite3 import os from", "Dict of custom properties, e.g. 'age' = 39, 'color' =", "dt = datetime.now() RM.execute(\"insert into memory (concept, location, person,DATE) values", "datetime.now() # RM.execute(\"insert into RAW_INPUT (RAW, DATE) values (?, ?)\",(c,", "self.reference = 'none' # unused self.locality = locality # where", "item =?''', (a,)) user = cursor.fetchone() return user def Dump(self):", "datetime.now() RM.execute(\"insert into memory (concept, location, person,DATE) values (?, ?,", "= cursor.fetchone() return user def search_profile(self, a): cursor.execute('''SELECT value FROM", "where is 'concept' self.person = '3sg' # e.g. a thing", "False # True if proper noun: e.g. 
Robert self.properties =", "profile WHERE item =?''', (a,)) user = cursor.fetchone() return user", "#RM = sqlite3.connect(dir + '/data/robbie_memory.db') cursor = RM.cursor() # Information", "= '3sg' # Return True if concept 'c' (string) is", "WHERE concept = ? ''', (b, a)) RM.commit() def search_memory(self,", "dt = datetime.now() # RM.execute(\"insert into RAW_INPUT (RAW, DATE) values", "# Robbie memory class. Collection of concepts class memoryClass(): def", "self.concepts[c].person = '3sg' # Return True if concept 'c' (string)", "self.person = '3sg' # e.g. a thing is 3rd-person, singular", "(RAW, DATE) values (?, ?)\",(c, dt)) # RM.commit() self.concepts[c] =", "search_profile(self, a): cursor.execute('''SELECT value FROM profile WHERE item =?''', (a,))", "'its' } # Add a concept to memory def add(self,", "# if user == 'None': return user def add_memory(self, a,", "cursor.execute('''UPDATE memory SET location = ? WHERE concept = ?", "RM.execute(\"insert into RAW_INPUT (RAW, DATE) values (?, ?)\",(c, dt)) #", "True if concept 'c' (string) is in memory def known(self,", "# dt = datetime.now() # RM.execute(\"insert into RAW_INPUT (RAW, DATE)", "pattern.en import pprint from pattern.en import parsetree from pattern.en import", "if c in self.person: self.concepts[c].person = self.person[c] else: self.concepts[c].person =", "{} self.person = {'I': '1sg', 'you': '2sg' } self.posessivePronouns =", "stored in sqlite data base holds raw input and memories", "import re import sqlite3 import os from datetime import date,", "= '3sg' dt = datetime.now() RM.execute(\"insert into memory (concept, location,", "e.g. Robert self.properties = {} # Dict of custom properties,", "= datetime.now() RM.execute(\"insert into memory (concept, location, person,DATE) values (?,", "memory SET location = ? WHERE concept = ? 
''',", "= self.person[c] else: self.concepts[c].person = '3sg' # Return True if", "memory (concept, location, person,DATE) values (?, ?, ?, ?)\", (a,", "if concept 'c' (string) is in memory def known(self, c):", "e.g. a thing is 3rd-person, singular self.isProperNoun = False #", "if proper noun: e.g. Robert self.properties = {} # Dict", "?)\",(c, dt)) # RM.commit() self.concepts[c] = conceptClass() if c in", "cursor = RM.cursor() # Information about a single concept class", "# Dict of custom properties, e.g. 'age' = 39, 'color'", "= 'blue' # Robbie memory class. Collection of concepts class", "from pattern.en import wordnet from pattern.en import pluralize, singularize from", "def known(self, c): cursor.execute('''SELECT concept, location FROM memory WHERE concept", "(?, ?, ?, ?)\", (a, b, c, dt)) RM.commit() def", "return user def add_memory(self, a, b): c = '3sg' dt", "else: self.concepts[c].person = '3sg' # Return True if concept 'c'", "self.person: self.concepts[c].person = self.person[c] else: self.concepts[c].person = '3sg' # Return", "c in self.person: self.concepts[c].person = self.person[c] else: self.concepts[c].person = '3sg'", "os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir +'robbie_memory.sqlite') #RM =", "'/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir +'robbie_memory.sqlite') #RM = sqlite3.connect(dir + '/data/robbie_memory.db')", "= {} # Dict of custom properties, e.g. 
'age' =", "c, dt)) RM.commit() def update_memory(self, a, b): cursor.execute('''UPDATE memory SET", "Return True if concept 'c' (string) is in memory def", "pattern.en import parse from pattern.en import pprint from pattern.en import", "is in memory def known(self, c): cursor.execute('''SELECT concept, location FROM", "WHERE concept =?''', (c,)) user = cursor.fetchone() # if user", "about a single concept class conceptClass: def __init__(self, state='none', locality='none'):", "= '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir +'robbie_memory.sqlite') #RM = sqlite3.connect(dir +", "?, ?, ?)\", (a, b, c, dt)) RM.commit() def update_memory(self,", "in self.person: self.concepts[c].person = self.person[c] else: self.concepts[c].person = '3sg' #", "pattern.en import pluralize, singularize from pattern.en import conjugate, lemma, lexeme", "oncept to raw_input table in robbie_memory # x= # dt", "= 39, 'color' = 'blue' # Robbie memory class. Collection", "{'I': '1sg', 'you': '2sg' } self.posessivePronouns = {'1sg': 'my', '2sg':", "def add_memory(self, a, b): c = '3sg' dt = datetime.now()", "= {'1sg': 'my', '2sg': 'your', '3sg': 'its' } # Add", "concept =?''', (c,)) user = cursor.fetchone() # if user ==", "Information about a single concept class conceptClass: def __init__(self, state='none',", "WHERE item =?''', (a,)) user = cursor.fetchone() return user def", "import wordnet from pattern.en import pluralize, singularize from pattern.en import", "search_memory(self, a): cursor.execute('''SELECT concept,location, person FROM memory WHERE concept =?''',", "and memories in parse taged columns ''' import sys import", "'concept' self.person = '3sg' # e.g. 
a thing is 3rd-person,", "(a, b, c, dt)) RM.commit() def update_memory(self, a, b): cursor.execute('''UPDATE", "import parsetree from pattern.en import wordnet from pattern.en import pluralize,", "=?''', (c,)) user = cursor.fetchone() # if user == 'None':", "a single concept class conceptClass: def __init__(self, state='none', locality='none'): self.state", "# RM.execute(\"insert into RAW_INPUT (RAW, DATE) values (?, ?)\",(c, dt))", "to raw_input table in robbie_memory # x= # dt =", "cursor.execute('''SELECT concept,location, person FROM memory WHERE concept =?''', (a,)) user", "custom properties, e.g. 'age' = 39, 'color' = 'blue' #", "+'robbie_memory.sqlite') #RM = sqlite3.connect(dir + '/data/robbie_memory.db') cursor = RM.cursor() #", "self.concepts[c] = conceptClass() if c in self.person: self.concepts[c].person = self.person[c]", "concept 'c' (string) is in memory def known(self, c): cursor.execute('''SELECT", "locality='none'): self.state = state # what/how is 'concept' self.reference =", "'/data/robbie_memory.db') cursor = RM.cursor() # Information about a single concept", "os from datetime import date, datetime from pattern.en import parse", "class stored in sqlite data base holds raw input and", "cursor.fetchone() # if user == 'None': return user def add_memory(self,", "robbie_memory # x= # dt = datetime.now() # RM.execute(\"insert into", "=?''', (a,)) user = cursor.fetchone() return user def search_profile(self, a):", "? ''', (b, a)) RM.commit() def search_memory(self, a): cursor.execute('''SELECT concept,location,", "properties, e.g. 
'age' = 39, 'color' = 'blue' # Robbie", "cursor.execute('''SELECT value FROM profile WHERE item =?''', (a,)) user =", "input and memories in parse taged columns ''' import sys", "def __init__(self): self.concepts = {} self.person = {'I': '1sg', 'you':", "pattern.en import wordnet from pattern.en import pluralize, singularize from pattern.en", "# x= # dt = datetime.now() # RM.execute(\"insert into RAW_INPUT", "a concept to memory def add(self, c): # add oncept", "user = cursor.fetchone() # if user == 'None': return user", "self.posessivePronouns = {'1sg': 'my', '2sg': 'your', '3sg': 'its' } #", "= sqlite3.connect(dir +'robbie_memory.sqlite') #RM = sqlite3.connect(dir + '/data/robbie_memory.db') cursor =", "in parse taged columns ''' import sys import re import", "a): cursor.execute('''SELECT value FROM profile WHERE item =?''', (a,)) user", "RM.cursor() # Information about a single concept class conceptClass: def", "locality # where is 'concept' self.person = '3sg' # e.g.", "concept =?''', (a,)) user = cursor.fetchone() return user def search_profile(self,", "known(self, c): cursor.execute('''SELECT concept, location FROM memory WHERE concept =?''',", "a)) RM.commit() def search_memory(self, a): cursor.execute('''SELECT concept,location, person FROM memory", "def search_memory(self, a): cursor.execute('''SELECT concept,location, person FROM memory WHERE concept", "RM.execute(\"insert into memory (concept, location, person,DATE) values (?, ?, ?,", "noun: e.g. 
Robert self.properties = {} # Dict of custom", "singular self.isProperNoun = False # True if proper noun: e.g.", "cursor.execute('''SELECT concept, location FROM memory WHERE concept =?''', (c,)) user", "from pattern.en import parse from pattern.en import pprint from pattern.en", "concept to memory def add(self, c): # add oncept to", "pattern.en import conjugate, lemma, lexeme #dir = os.path.dirname(os.path.abspath(__file__)) dir =", "class memoryClass(): def __init__(self): self.concepts = {} self.person = {'I':", "concept, location FROM memory WHERE concept =?''', (c,)) user =", "x= # dt = datetime.now() # RM.execute(\"insert into RAW_INPUT (RAW,", "a, b): cursor.execute('''UPDATE memory SET location = ? WHERE concept", "= {'I': '1sg', 'you': '2sg' } self.posessivePronouns = {'1sg': 'my',", "add(self, c): # add oncept to raw_input table in robbie_memory", "user def search_profile(self, a): cursor.execute('''SELECT value FROM profile WHERE item", "of custom properties, e.g. 'age' = 39, 'color' = 'blue'", "} # Add a concept to memory def add(self, c):", "'None': return user def add_memory(self, a, b): c = '3sg'", "DATE) values (?, ?)\",(c, dt)) # RM.commit() self.concepts[c] = conceptClass()", "import sqlite3 import os from datetime import date, datetime from", "dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM = sqlite3.connect(dir +'robbie_memory.sqlite') #RM = sqlite3.connect(dir", "to memory def add(self, c): # add oncept to raw_input", "(c,)) user = cursor.fetchone() # if user == 'None': return", "return user def search_profile(self, a): cursor.execute('''SELECT value FROM profile WHERE", "'age' = 39, 'color' = 'blue' # Robbie memory class.", "def add(self, c): # add oncept to raw_input table in", "of concepts class memoryClass(): def __init__(self): self.concepts = {} self.person", "location, person,DATE) values (?, ?, ?, ?)\", (a, b, c,", "a thing is 3rd-person, singular self.isProperNoun = False # True", "b): c = '3sg' dt = datetime.now() 
RM.execute(\"insert into memory", "import pluralize, singularize from pattern.en import conjugate, lemma, lexeme #dir", "self.concepts[c].person = self.person[c] else: self.concepts[c].person = '3sg' # Return True", "import parse from pattern.en import pprint from pattern.en import parsetree", "raw input and memories in parse taged columns ''' import", "parse from pattern.en import pprint from pattern.en import parsetree from", "= cursor.fetchone() # if user == 'None': return user def", "sys import re import sqlite3 import os from datetime import", "person,DATE) values (?, ?, ?, ?)\", (a, b, c, dt))", "proper noun: e.g. Robert self.properties = {} # Dict of", "import date, datetime from pattern.en import parse from pattern.en import", "self.person[c] else: self.concepts[c].person = '3sg' # Return True if concept", "conjugate, lemma, lexeme #dir = os.path.dirname(os.path.abspath(__file__)) dir = '/home/erni/catkin_ws/src/max_ros/max_ai/src/max_ai/' RM", "concept = ? ''', (b, a)) RM.commit() def search_memory(self, a):", "'2sg' } self.posessivePronouns = {'1sg': 'my', '2sg': 'your', '3sg': 'its'", "in sqlite data base holds raw input and memories in", "'your', '3sg': 'its' } # Add a concept to memory", "base holds raw input and memories in parse taged columns", "def update_memory(self, a, b): cursor.execute('''UPDATE memory SET location = ?", "conceptClass() if c in self.person: self.concepts[c].person = self.person[c] else: self.concepts[c].person", "'c' (string) is in memory def known(self, c): cursor.execute('''SELECT concept,", "self.concepts = {} self.person = {'I': '1sg', 'you': '2sg' }", "columns ''' import sys import re import sqlite3 import os", "datetime import date, datetime from pattern.en import parse from pattern.en", "singularize from pattern.en import conjugate, lemma, lexeme #dir = os.path.dirname(os.path.abspath(__file__))" ]
[ "= bivariate_normal(X, Y, 1.5, 0.5, 1, 1) Z = 10", "# to make the bad region transparent. This is the", "palette.set* lines, you will see # all the defaults; under", "palette.set_under('g', 1.0) palette.set_bad('b', 1.0) # Alternatively, we could use #", "range to which the regular palette color scale is applied.", "of BoundaryNorm to get a filled contour effect. ''' from", "lines, you will see # all the defaults; under and", "contour effect. ''' from pylab import * from numpy import", "could use # palette.set_bad(alpha = 0.0) # to make the", "> 1.2, Z) # By setting vmin and vmax in", "out all the palette.set* lines, you will see # all", "norm, we establish the # range to which the regular", "over will be colored with the # first and last", "interpolation='bilinear', cmap=palette, norm = colors.Normalize(vmin = -1.0, vmax = 1.0,", "Red=high, Blue=bad') colorbar(im, extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2) im = imshow(Zm,", "1], ncolors=256, clip = False), origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm') colorbar(im,", "colorbar(im, extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2) im = imshow(Zm, interpolation='nearest', cmap=palette,", "import ma import matplotlib.colors as colors delta = 0.025 x", "the # first and last colors in the palette, respectively.", "ma import matplotlib.colors as colors delta = 0.025 x =", "palette.set_over, etc. subplot(1,2,1) im = imshow(Zm, interpolation='bilinear', cmap=palette, norm =", "= colors.Normalize(vmin = -1.0, vmax = 1.0, clip = False),", "# difference of Gaussians # Set up a colormap: palette", "'''imshow with masked array input and out-of-range colors. The second", "the default. 
# If you comment out all the palette.set*", "the # range to which the regular palette color scale", "-0.2, 0, 0.2, 0.5, 1], ncolors=256, clip = False), origin='lower',", "The second subplot illustrates the use of BoundaryNorm to get", "norm = colors.BoundaryNorm([-1, -0.5, -0.2, 0, 0.2, 0.5, 1], ncolors=256,", "use # palette.set_bad(alpha = 0.0) # to make the bad", "be colored with the # first and last colors in", "make the bad region transparent. This is the default. #", "colors in the palette, respectively. Zm = ma.masked_where(Z > 1.2,", "with masked array input and out-of-range colors. The second subplot", "the defaults; under and over will be colored with the", "# Alternatively, we could use # palette.set_bad(alpha = 0.0) #", "# first and last colors in the palette, respectively. Zm", "all the palette.set* lines, you will see # all the", "masked array input and out-of-range colors. The second subplot illustrates", "# Set up a colormap: palette = cm.gray palette.set_over('r', 1.0)", "clip = False), origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm') colorbar(im, extend='both', spacing='proportional',", "This is the default. # If you comment out all", "the bad region transparent. This is the default. # If", "imshow(Zm, interpolation='bilinear', cmap=palette, norm = colors.Normalize(vmin = -1.0, vmax =", "* (Z2-Z1) # difference of Gaussians # Set up a", "origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm') colorbar(im, extend='both', spacing='proportional', orientation='horizontal', shrink=0.8) show()", "1.0) # Alternatively, we could use # palette.set_bad(alpha = 0.0)", "transparent. This is the default. # If you comment out", "to which the regular palette color scale is applied. #", "bad region transparent. This is the default. 
# If you", "0.0, 0.0) Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1)", "subplot(1,2,2) im = imshow(Zm, interpolation='nearest', cmap=palette, norm = colors.BoundaryNorm([-1, -0.5,", "setting vmin and vmax in the norm, we establish the", "interpolation='nearest', cmap=palette, norm = colors.BoundaryNorm([-1, -0.5, -0.2, 0, 0.2, 0.5,", "y) Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0) Z2", "and over will be colored with the # first and", "vmax = 1.0, clip = False), origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high,", "<filename>examples/pylab_examples/image_masked.py #!/usr/bin/env python '''imshow with masked array input and out-of-range", "use of BoundaryNorm to get a filled contour effect. '''", "python '''imshow with masked array input and out-of-range colors. The", "you will see # all the defaults; under and over", "extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2) im = imshow(Zm, interpolation='nearest', cmap=palette, norm", "ncolors=256, clip = False), origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm') colorbar(im, extend='both',", "and vmax in the norm, we establish the # range", "range is colored based on palette.set_over, etc. subplot(1,2,1) im =", "palette, respectively. Zm = ma.masked_where(Z > 1.2, Z) # By", "# If you comment out all the palette.set* lines, you", "im = imshow(Zm, interpolation='bilinear', cmap=palette, norm = colors.Normalize(vmin = -1.0,", "and out-of-range colors. The second subplot illustrates the use of", "1.0) palette.set_under('g', 1.0) palette.set_bad('b', 1.0) # Alternatively, we could use", "1) Z = 10 * (Z2-Z1) # difference of Gaussians", "to make the bad region transparent. This is the default.", "10 * (Z2-Z1) # difference of Gaussians # Set up", "last colors in the palette, respectively. Zm = ma.masked_where(Z >", "is applied. # Anything above that range is colored based", "illustrates the use of BoundaryNorm to get a filled contour", "colored based on palette.set_over, etc. 
subplot(1,2,1) im = imshow(Zm, interpolation='bilinear',", "see # all the defaults; under and over will be", "import * from numpy import ma import matplotlib.colors as colors", "Alternatively, we could use # palette.set_bad(alpha = 0.0) # to", "will be colored with the # first and last colors", "delta) X, Y = meshgrid(x, y) Z1 = bivariate_normal(X, Y,", "the use of BoundaryNorm to get a filled contour effect.", "scale is applied. # Anything above that range is colored", "ma.masked_where(Z > 1.2, Z) # By setting vmin and vmax", "matplotlib.colors as colors delta = 0.025 x = y =", "from pylab import * from numpy import ma import matplotlib.colors", "0.025 x = y = arange(-3.0, 3.0, delta) X, Y", "= y = arange(-3.0, 3.0, delta) X, Y = meshgrid(x,", "with the # first and last colors in the palette,", "colors.BoundaryNorm([-1, -0.5, -0.2, 0, 0.2, 0.5, 1], ncolors=256, clip =", "from numpy import ma import matplotlib.colors as colors delta =", "palette.set_bad('b', 1.0) # Alternatively, we could use # palette.set_bad(alpha =", "* from numpy import ma import matplotlib.colors as colors delta", "array input and out-of-range colors. The second subplot illustrates the", "= meshgrid(x, y) Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0,", "based on palette.set_over, etc. subplot(1,2,1) im = imshow(Zm, interpolation='bilinear', cmap=palette,", "BoundaryNorm to get a filled contour effect. ''' from pylab", "first and last colors in the palette, respectively. 
Zm =", "= bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0) Z2 = bivariate_normal(X,", "Z) # By setting vmin and vmax in the norm,", "= False), origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm') colorbar(im, extend='both', spacing='proportional', orientation='horizontal',", "clip = False), origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad') colorbar(im, extend='both',", "Blue=bad') colorbar(im, extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2) im = imshow(Zm, interpolation='nearest',", "= imshow(Zm, interpolation='nearest', cmap=palette, norm = colors.BoundaryNorm([-1, -0.5, -0.2, 0,", "pylab import * from numpy import ma import matplotlib.colors as", "1.0, 1.0, 0.0, 0.0) Z2 = bivariate_normal(X, Y, 1.5, 0.5,", "in the palette, respectively. Zm = ma.masked_where(Z > 1.2, Z)", "cmap=palette, norm = colors.BoundaryNorm([-1, -0.5, -0.2, 0, 0.2, 0.5, 1],", "in the norm, we establish the # range to which", "Y = meshgrid(x, y) Z1 = bivariate_normal(X, Y, 1.0, 1.0,", "0.0) Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1) Z", "= colors.BoundaryNorm([-1, -0.5, -0.2, 0, 0.2, 0.5, 1], ncolors=256, clip", "arange(-3.0, 3.0, delta) X, Y = meshgrid(x, y) Z1 =", "1.5, 0.5, 1, 1) Z = 10 * (Z2-Z1) #", "y = arange(-3.0, 3.0, delta) X, Y = meshgrid(x, y)", "x = y = arange(-3.0, 3.0, delta) X, Y =", "vmax in the norm, we establish the # range to", "cm.gray palette.set_over('r', 1.0) palette.set_under('g', 1.0) palette.set_bad('b', 1.0) # Alternatively, we", "Z = 10 * (Z2-Z1) # difference of Gaussians #", "numpy import ma import matplotlib.colors as colors delta = 0.025", "default. 
# If you comment out all the palette.set* lines,", "palette = cm.gray palette.set_over('r', 1.0) palette.set_under('g', 1.0) palette.set_bad('b', 1.0) #", "1.0, 0.0, 0.0) Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1,", "''' from pylab import * from numpy import ma import", "1, 1) Z = 10 * (Z2-Z1) # difference of", "= ma.masked_where(Z > 1.2, Z) # By setting vmin and", "which the regular palette color scale is applied. # Anything", "= 0.0) # to make the bad region transparent. This", "palette.set_bad(alpha = 0.0) # to make the bad region transparent.", "Zm = ma.masked_where(Z > 1.2, Z) # By setting vmin", "bivariate_normal(X, Y, 1.5, 0.5, 1, 1) Z = 10 *", "Y, 1.5, 0.5, 1, 1) Z = 10 * (Z2-Z1)", "origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad') colorbar(im, extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2)", "filled contour effect. ''' from pylab import * from numpy", "colors delta = 0.025 x = y = arange(-3.0, 3.0,", "Gaussians # Set up a colormap: palette = cm.gray palette.set_over('r',", "we could use # palette.set_bad(alpha = 0.0) # to make", "difference of Gaussians # Set up a colormap: palette =", "= False), origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad') colorbar(im, extend='both', orientation='horizontal',", "0.0) # to make the bad region transparent. This is", "that range is colored based on palette.set_over, etc. subplot(1,2,1) im", "of Gaussians # Set up a colormap: palette = cm.gray", "colored with the # first and last colors in the", "shrink=0.8) subplot(1,2,2) im = imshow(Zm, interpolation='nearest', cmap=palette, norm = colors.BoundaryNorm([-1,", "Set up a colormap: palette = cm.gray palette.set_over('r', 1.0) palette.set_under('g',", "on palette.set_over, etc. subplot(1,2,1) im = imshow(Zm, interpolation='bilinear', cmap=palette, norm", "etc. 
subplot(1,2,1) im = imshow(Zm, interpolation='bilinear', cmap=palette, norm = colors.Normalize(vmin", "= 10 * (Z2-Z1) # difference of Gaussians # Set", "vmin and vmax in the norm, we establish the #", "= 0.025 x = y = arange(-3.0, 3.0, delta) X,", "a colormap: palette = cm.gray palette.set_over('r', 1.0) palette.set_under('g', 1.0) palette.set_bad('b',", "-0.5, -0.2, 0, 0.2, 0.5, 1], ncolors=256, clip = False),", "effect. ''' from pylab import * from numpy import ma", "Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0) Z2 =", "all the defaults; under and over will be colored with", "Y, 1.0, 1.0, 0.0, 0.0) Z2 = bivariate_normal(X, Y, 1.5,", "color scale is applied. # Anything above that range is", "Anything above that range is colored based on palette.set_over, etc.", "orientation='horizontal', shrink=0.8) subplot(1,2,2) im = imshow(Zm, interpolation='nearest', cmap=palette, norm =", "0.5, 1, 1) Z = 10 * (Z2-Z1) # difference", "By setting vmin and vmax in the norm, we establish", "(Z2-Z1) # difference of Gaussians # Set up a colormap:", "the regular palette color scale is applied. # Anything above", "bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0) Z2 = bivariate_normal(X, Y,", "1.2, Z) # By setting vmin and vmax in the", "delta = 0.025 x = y = arange(-3.0, 3.0, delta)", "region transparent. This is the default. # If you comment", "# all the defaults; under and over will be colored", "# palette.set_bad(alpha = 0.0) # to make the bad region", "extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad') colorbar(im, extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2) im", "norm = colors.Normalize(vmin = -1.0, vmax = 1.0, clip =", "you comment out all the palette.set* lines, you will see", "= 1.0, clip = False), origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad')", "title('Green=low, Red=high, Blue=bad') colorbar(im, extend='both', orientation='horizontal', shrink=0.8) subplot(1,2,2) im =", "a filled contour effect. 
''' from pylab import * from", "0, 0.2, 0.5, 1], ncolors=256, clip = False), origin='lower', extent=[-3,3,-3,3])", "palette color scale is applied. # Anything above that range", "cmap=palette, norm = colors.Normalize(vmin = -1.0, vmax = 1.0, clip", "respectively. Zm = ma.masked_where(Z > 1.2, Z) # By setting", "0.5, 1], ncolors=256, clip = False), origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm')", "= arange(-3.0, 3.0, delta) X, Y = meshgrid(x, y) Z1", "to get a filled contour effect. ''' from pylab import", "up a colormap: palette = cm.gray palette.set_over('r', 1.0) palette.set_under('g', 1.0)", "regular palette color scale is applied. # Anything above that", "will see # all the defaults; under and over will", "above that range is colored based on palette.set_over, etc. subplot(1,2,1)", "= imshow(Zm, interpolation='bilinear', cmap=palette, norm = colors.Normalize(vmin = -1.0, vmax", "under and over will be colored with the # first", "palette.set_over('r', 1.0) palette.set_under('g', 1.0) palette.set_bad('b', 1.0) # Alternatively, we could", "defaults; under and over will be colored with the #", "False), origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad') colorbar(im, extend='both', orientation='horizontal', shrink=0.8)", "# Anything above that range is colored based on palette.set_over,", "im = imshow(Zm, interpolation='nearest', cmap=palette, norm = colors.BoundaryNorm([-1, -0.5, -0.2,", "3.0, delta) X, Y = meshgrid(x, y) Z1 = bivariate_normal(X,", "#!/usr/bin/env python '''imshow with masked array input and out-of-range colors.", "Z2 = bivariate_normal(X, Y, 1.5, 0.5, 1, 1) Z =", "the palette, respectively. Zm = ma.masked_where(Z > 1.2, Z) #", "colors.Normalize(vmin = -1.0, vmax = 1.0, clip = False), origin='lower',", "out-of-range colors. 
The second subplot illustrates the use of BoundaryNorm", "we establish the # range to which the regular palette", "import matplotlib.colors as colors delta = 0.025 x = y", "subplot(1,2,1) im = imshow(Zm, interpolation='bilinear', cmap=palette, norm = colors.Normalize(vmin =", "is colored based on palette.set_over, etc. subplot(1,2,1) im = imshow(Zm,", "colors. The second subplot illustrates the use of BoundaryNorm to", "as colors delta = 0.025 x = y = arange(-3.0,", "1.0, clip = False), origin='lower', extent=[-3,3,-3,3]) title('Green=low, Red=high, Blue=bad') colorbar(im,", "0.2, 0.5, 1], ncolors=256, clip = False), origin='lower', extent=[-3,3,-3,3]) title('With", "establish the # range to which the regular palette color", "input and out-of-range colors. The second subplot illustrates the use", "# range to which the regular palette color scale is", "applied. # Anything above that range is colored based on", "the palette.set* lines, you will see # all the defaults;", "If you comment out all the palette.set* lines, you will", "meshgrid(x, y) Z1 = bivariate_normal(X, Y, 1.0, 1.0, 0.0, 0.0)", "colormap: palette = cm.gray palette.set_over('r', 1.0) palette.set_under('g', 1.0) palette.set_bad('b', 1.0)", "the norm, we establish the # range to which the", "= -1.0, vmax = 1.0, clip = False), origin='lower', extent=[-3,3,-3,3])", "False), origin='lower', extent=[-3,3,-3,3]) title('With BoundaryNorm') colorbar(im, extend='both', spacing='proportional', orientation='horizontal', shrink=0.8)", "second subplot illustrates the use of BoundaryNorm to get a", "and last colors in the palette, respectively. Zm = ma.masked_where(Z", "1.0) palette.set_bad('b', 1.0) # Alternatively, we could use # palette.set_bad(alpha", "imshow(Zm, interpolation='nearest', cmap=palette, norm = colors.BoundaryNorm([-1, -0.5, -0.2, 0, 0.2,", "get a filled contour effect. 
''' from pylab import *", "comment out all the palette.set* lines, you will see #", "# By setting vmin and vmax in the norm, we", "subplot illustrates the use of BoundaryNorm to get a filled", "= cm.gray palette.set_over('r', 1.0) palette.set_under('g', 1.0) palette.set_bad('b', 1.0) # Alternatively,", "is the default. # If you comment out all the", "-1.0, vmax = 1.0, clip = False), origin='lower', extent=[-3,3,-3,3]) title('Green=low,", "X, Y = meshgrid(x, y) Z1 = bivariate_normal(X, Y, 1.0," ]
[ "List, Dict from datetime import datetime class Authenticate(BaseModel): access_token: str", "typing import List, Dict from datetime import datetime class Authenticate(BaseModel):", "import List, Dict from datetime import datetime class Authenticate(BaseModel): access_token:", "validator, Field from typing import List, Dict from datetime import", "from pydantic import BaseModel, validator, Field from typing import List,", "Field from typing import List, Dict from datetime import datetime", "from typing import List, Dict from datetime import datetime class", "pydantic import BaseModel, validator, Field from typing import List, Dict", "import BaseModel, validator, Field from typing import List, Dict from", "BaseModel, validator, Field from typing import List, Dict from datetime" ]
[ "\"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\" ]", "requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot 9000'}) r = r.json() boxed =", "return elif random.randint(0, 100) < 25: async with message.channel.typing(): chosen_sub", "Randomly spawns memes. \"\"\" subreddits = [ \"dankmemes\", \"wholesomememes\", \"memes\",", "Title: {title}', color=0x6bdcd7) embed.set_author(name=\"A wild meme has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On", "\"2meirl4meirl\", \"fellowkids\", \"tumblr\" ] def __init__(self, bot): self.bot = bot", "import discord from discord.ext import commands import requests import random", "= bot @commands.Cog.listener() async def on_message(self, message): if message.author ==", "\"tumblr\" ] def __init__(self, bot): self.bot = bot @commands.Cog.listener() async", "Box class WildMemes(commands.Cog): \"\"\" Randomly spawns memes. \"\"\" subreddits =", "elif random.randint(0, 100) < 25: async with message.channel.typing(): chosen_sub =", "image = data.url upvotes = data.ups title = data.title subreddit", "= discord.Embed(title=f'Meme Title: {title}', color=0x6bdcd7) embed.set_author(name=\"A wild meme has appeared!\")", "\"\"\" Randomly spawns memes. 
\"\"\" subreddits = [ \"dankmemes\", \"wholesomememes\",", "__init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_message(self, message):", "bot): self.bot = bot @commands.Cog.listener() async def on_message(self, message): if", "embed.set_footer(text=f\"On {subreddit} with {upvotes} upvotes.\") await message.channel.send(embed=embed) def setup(bot): bot.add_cog(WildMemes(bot))", "async with message.channel.typing(): chosen_sub = random.choice(self.subreddits) r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent':", "import random from box import Box class WildMemes(commands.Cog): \"\"\" Randomly", "message): if message.author == self.bot.user: return elif random.randint(0, 100) <", "= Box(r) data = (random.choice(boxed.data.children)).data image = data.url upvotes =", "from box import Box class WildMemes(commands.Cog): \"\"\" Randomly spawns memes.", "Bot 9000'}) r = r.json() boxed = Box(r) data =", "= data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme Title: {title}', color=0x6bdcd7) embed.set_author(name=\"A wild", "embed = discord.Embed(title=f'Meme Title: {title}', color=0x6bdcd7) embed.set_author(name=\"A wild meme has", "memes. 
\"\"\" subreddits = [ \"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\",", "== self.bot.user: return elif random.randint(0, 100) < 25: async with", "on_message(self, message): if message.author == self.bot.user: return elif random.randint(0, 100)", "color=0x6bdcd7) embed.set_author(name=\"A wild meme has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with", "r.json() boxed = Box(r) data = (random.choice(boxed.data.children)).data image = data.url", "[ \"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\"", "commands import requests import random from box import Box class", "= requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot 9000'}) r = r.json() boxed", "class WildMemes(commands.Cog): \"\"\" Randomly spawns memes. \"\"\" subreddits = [", "requests import random from box import Box class WildMemes(commands.Cog): \"\"\"", "self.bot = bot @commands.Cog.listener() async def on_message(self, message): if message.author", "discord from discord.ext import commands import requests import random from", "random.randint(0, 100) < 25: async with message.channel.typing(): chosen_sub = random.choice(self.subreddits)", "upvotes = data.ups title = data.title subreddit = data.subreddit_name_prefixed embed", "with message.channel.typing(): chosen_sub = random.choice(self.subreddits) r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super", "bot @commands.Cog.listener() async def on_message(self, message): if message.author == self.bot.user:", "{title}', color=0x6bdcd7) embed.set_author(name=\"A wild meme has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit}", "data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme Title: {title}', 
color=0x6bdcd7) embed.set_author(name=\"A wild meme", "box import Box class WildMemes(commands.Cog): \"\"\" Randomly spawns memes. \"\"\"", "@commands.Cog.listener() async def on_message(self, message): if message.author == self.bot.user: return", "data.url upvotes = data.ups title = data.title subreddit = data.subreddit_name_prefixed", "random.choice(self.subreddits) r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot 9000'}) r =", "'Super Bot 9000'}) r = r.json() boxed = Box(r) data", "import Box class WildMemes(commands.Cog): \"\"\" Randomly spawns memes. \"\"\" subreddits", "self.bot.user: return elif random.randint(0, 100) < 25: async with message.channel.typing():", "import commands import requests import random from box import Box", "= (random.choice(boxed.data.children)).data image = data.url upvotes = data.ups title =", "25: async with message.channel.typing(): chosen_sub = random.choice(self.subreddits) r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\",", "appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with {upvotes} upvotes.\") await message.channel.send(embed=embed) def", "9000'}) r = r.json() boxed = Box(r) data = (random.choice(boxed.data.children)).data", "= data.title subreddit = data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme Title: {title}',", "r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot 9000'}) r = r.json()", "data.title subreddit = data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme Title: {title}', color=0x6bdcd7)", "embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with {upvotes} upvotes.\") await message.channel.send(embed=embed) def setup(bot):", "WildMemes(commands.Cog): \"\"\" Randomly spawns memes. 
\"\"\" subreddits = [ \"dankmemes\",", "\"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\" ] def __init__(self, bot): self.bot =", "data = (random.choice(boxed.data.children)).data image = data.url upvotes = data.ups title", "from discord.ext import commands import requests import random from box", "\"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\" ] def __init__(self, bot):", "message.author == self.bot.user: return elif random.randint(0, 100) < 25: async", "has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with {upvotes} upvotes.\") await message.channel.send(embed=embed)", "100) < 25: async with message.channel.typing(): chosen_sub = random.choice(self.subreddits) r", "boxed = Box(r) data = (random.choice(boxed.data.children)).data image = data.url upvotes", "meme has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with {upvotes} upvotes.\") await", "<reponame>aniket091/modmail-plugins-1 import discord from discord.ext import commands import requests import", "\"\"\" subreddits = [ \"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\",", "Box(r) data = (random.choice(boxed.data.children)).data image = data.url upvotes = data.ups", "def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_message(self,", "if message.author == self.bot.user: return elif random.randint(0, 100) < 25:", "discord.Embed(title=f'Meme Title: {title}', color=0x6bdcd7) embed.set_author(name=\"A wild meme has appeared!\") embed.set_image(url=image)", "\"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\" ] def __init__(self,", "import requests import random from box import Box class WildMemes(commands.Cog):", "= r.json() boxed = Box(r) data = (random.choice(boxed.data.children)).data image =", "chosen_sub = random.choice(self.subreddits) r = 
requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot 9000'})", "spawns memes. \"\"\" subreddits = [ \"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\",", "wild meme has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with {upvotes} upvotes.\")", "r = r.json() boxed = Box(r) data = (random.choice(boxed.data.children)).data image", "async def on_message(self, message): if message.author == self.bot.user: return elif", "] def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def", "embed.set_author(name=\"A wild meme has appeared!\") embed.set_image(url=image) embed.set_footer(text=f\"On {subreddit} with {upvotes}", "= data.ups title = data.title subreddit = data.subreddit_name_prefixed embed =", "title = data.title subreddit = data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme Title:", "\"fellowkids\", \"tumblr\" ] def __init__(self, bot): self.bot = bot @commands.Cog.listener()", "data.ups title = data.title subreddit = data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme", "subreddit = data.subreddit_name_prefixed embed = discord.Embed(title=f'Meme Title: {title}', color=0x6bdcd7) embed.set_author(name=\"A", "= [ \"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\",", "\"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\" ] def", "random from box import Box class WildMemes(commands.Cog): \"\"\" Randomly spawns", "def on_message(self, message): if message.author == self.bot.user: return elif random.randint(0,", "subreddits = [ \"dankmemes\", \"wholesomememes\", \"memes\", \"terriblefacebookmemes\", \"historymemes\", \"me_irl\", \"2meirl4meirl\",", "= data.url upvotes = data.ups title = data.title subreddit =", "< 25: async with 
message.channel.typing(): chosen_sub = random.choice(self.subreddits) r =", "message.channel.typing(): chosen_sub = random.choice(self.subreddits) r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot", "discord.ext import commands import requests import random from box import", "= random.choice(self.subreddits) r = requests.get(f\"https://api.reddit.com/r/{chosen_sub}/top.json?sort=top&t=day&limit=500\", headers={'User-agent': 'Super Bot 9000'}) r", "(random.choice(boxed.data.children)).data image = data.url upvotes = data.ups title = data.title", "headers={'User-agent': 'Super Bot 9000'}) r = r.json() boxed = Box(r)", "\"historymemes\", \"me_irl\", \"2meirl4meirl\", \"fellowkids\", \"tumblr\" ] def __init__(self, bot): self.bot" ]
[ "Registry MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output Lines self.output", "[r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz # Recommended MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server", "<NAME> # <EMAIL> # <EMAIL> # # 10/24/2014 Original Construction", "'Machine') # Output Lines self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz", "10/24/2014 Original Construction ################################################################################ class Finding: def __init__(self): self.output =", "def __init__(self): self.output = [] self.is_compliant = False self.uuid =", "= \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli): # Initialize Compliance self.is_compliant =", "self.is_compliant = False return self.is_compliant def fix(self, cli): cli.powershell(r\"New-Item -path", "Finding: def __init__(self): self.output = [] self.is_compliant = False self.uuid", "self.is_compliant = False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli): #", "NT\\CurrentVersion\") for sz in multi_sz: if sz.lower() not in rec_multi_sz.lower():", "cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output Lines self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] +", "################################################################################ # 20bdcef0-5cc5-11e4-af55-00155d01fe08 # # <NAME> # <EMAIL> # <EMAIL>", "#!/usr/bin/python ################################################################################ # 20bdcef0-5cc5-11e4-af55-00155d01fe08 # # <NAME> # <EMAIL> #", 
"'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine' -Type", "-path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine' -Type MultiString -value", "self.is_compliant = True # Get Registry MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths',", "-path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine' -Type MultiString -value System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\")", "# Recommended MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz", "Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz in multi_sz: if sz.lower() not in", "= [] self.is_compliant = False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self,", "= False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli): # Initialize", "# 10/24/2014 Original Construction ################################################################################ class Finding: def __init__(self): self.output", "Compliance self.is_compliant = True # Get Registry MultiSZ multi_sz =", "not in rec_multi_sz.lower(): self.is_compliant = False return self.is_compliant def fix(self,", "Lines self.output = 
[r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz # Recommended MultiSZ", "MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz in multi_sz:", "= False return self.is_compliant def fix(self, cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\")", "'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'", "__init__(self): self.output = [] self.is_compliant = False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\"", "self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli): # Initialize Compliance self.is_compliant", "# # 10/24/2014 Original Construction ################################################################################ class Finding: def __init__(self):", "True # Get Registry MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') #", "('Machine=')] + multi_sz # Recommended MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows", "-path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 
'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine'", "[] self.is_compliant = False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli):", "# <EMAIL> # # 10/24/2014 Original Construction ################################################################################ class Finding:", "sz in multi_sz: if sz.lower() not in rec_multi_sz.lower(): self.is_compliant =", "def fix(self, cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item", "# Get Registry MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output", "20bdcef0-5cc5-11e4-af55-00155d01fe08 # # <NAME> # <EMAIL> # <EMAIL> # #", "return self.is_compliant def fix(self, cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path", "if sz.lower() not in rec_multi_sz.lower(): self.is_compliant = False return self.is_compliant", "# Output Lines self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz #", "# 20bdcef0-5cc5-11e4-af55-00155d01fe08 # # <NAME> # <EMAIL> # <EMAIL> #", "# <EMAIL> # <EMAIL> # # 10/24/2014 Original Construction ################################################################################", "for sz in multi_sz: if sz.lower() not in rec_multi_sz.lower(): self.is_compliant", "Get Registry MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output Lines", "cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 
'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty", "self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz # Recommended MultiSZ rec_multi_sz", "Initialize Compliance self.is_compliant = True # Get Registry MultiSZ multi_sz", "'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine' -Type MultiString -value System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server", "<EMAIL> # <EMAIL> # # 10/24/2014 Original Construction ################################################################################ class", "cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\")", "self.is_compliant def fix(self, cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\")", "self.output = [] self.is_compliant = False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def", "False self.uuid = \"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli): # Initialize Compliance", "# <NAME> # <EMAIL> # <EMAIL> # # 10/24/2014 Original", "Recommended MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server 
Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz in", "+ multi_sz # Recommended MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\")", "rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz in multi_sz: if", "fix(self, cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path", "\"20bdcef0-5cc5-11e4-af55-00155d01fe08\" def check(self, cli): # Initialize Compliance self.is_compliant = True", "<EMAIL> # # 10/24/2014 Original Construction ################################################################################ class Finding: def", "def check(self, cli): # Initialize Compliance self.is_compliant = True #", "cli): # Initialize Compliance self.is_compliant = True # Get Registry", "(\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz in multi_sz: if sz.lower() not", "-path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path", "= (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for sz in multi_sz: if sz.lower()", "in rec_multi_sz.lower(): self.is_compliant = False return self.is_compliant def 
fix(self, cli):", "= cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output Lines self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')]", "cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine' -Type MultiString", "= True # Get Registry MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine')", "cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg'\") cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths'\") cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name", "Original Construction ################################################################################ class Finding: def __init__(self): self.output = []", "Output Lines self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz # Recommended", "rec_multi_sz.lower(): self.is_compliant = False return self.is_compliant def fix(self, cli): cli.powershell(r\"New-Item", "sz.lower() not in rec_multi_sz.lower(): self.is_compliant = False return self.is_compliant def", "class Finding: def __init__(self): self.output = [] self.is_compliant = False", "False return self.is_compliant def fix(self, cli): cli.powershell(r\"New-Item -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers'\") cli.powershell(r\"New-Item", 
"################################################################################ class Finding: def __init__(self): self.output = [] self.is_compliant =", "MultiSZ multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output Lines self.output =", "# # <NAME> # <EMAIL> # <EMAIL> # # 10/24/2014", "in multi_sz: if sz.lower() not in rec_multi_sz.lower(): self.is_compliant = False", "cli.powershell(r\"Set-ItemProperty -path 'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths' -name 'Machine' -Type MultiString -value System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows", "Construction ################################################################################ class Finding: def __init__(self): self.output = [] self.is_compliant", "check(self, cli): # Initialize Compliance self.is_compliant = True # Get", "# Initialize Compliance self.is_compliant = True # Get Registry MultiSZ", "multi_sz # Recommended MultiSZ rec_multi_sz = (\"System\\CurrentControlSet\\Control\\ProductOptions,System\\CurrentControlSet\\Control\\Server Applications,Software\\Microsoft\\Windows NT\\CurrentVersion\") for", "= [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', ('Machine=')] + multi_sz # Recommended MultiSZ rec_multi_sz =", "multi_sz = cli.get_reg_multi_sz(r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths', 'Machine') # Output Lines self.output = [r'HKLM:\\SYSTEM\\CurrentControlSet\\control\\SecurePipeServers\\winreg\\allowedExactPaths',", "multi_sz: if sz.lower() not in rec_multi_sz.lower(): self.is_compliant = False return" ]
[ "MikanException(Exception): \"\"\"Generic Mikan exception\"\"\" class ConversionError(MikanException, ValueError): \"\"\"Cannot convert a", "<reponame>dzzhvks94vd2/mikan class MikanException(Exception): \"\"\"Generic Mikan exception\"\"\" class ConversionError(MikanException, ValueError): \"\"\"Cannot", "class MikanException(Exception): \"\"\"Generic Mikan exception\"\"\" class ConversionError(MikanException, ValueError): \"\"\"Cannot convert", "\"\"\"Generic Mikan exception\"\"\" class ConversionError(MikanException, ValueError): \"\"\"Cannot convert a string\"\"\"" ]
[ "path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/',", "name='login'), path('logout/', views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'),", "path from django.contrib.auth import views as auth_views from . import", "<reponame>hhhameem/CV-Recommender<gh_stars>1-10 from django.urls import path from django.contrib.auth import views as", "auth_views from . import views urlpatterns = [ path('register/', views.register,", "path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/', views.applicantedit, name='editapplicantprofile'), path('recruiterdashboard/profile-edit/',", "import views as auth_views from . 
import views urlpatterns =", "views.register, name='register'), path('login/', views.userlogin, name='login'), path('logout/', views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(),", "auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(),", "import views urlpatterns = [ path('register/', views.register, name='register'), path('login/', views.userlogin,", "name='register'), path('login/', views.userlogin, name='login'), path('logout/', views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'),", "name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),", "from . import views urlpatterns = [ path('register/', views.register, name='register'),", "views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(),", "django.contrib.auth import views as auth_views from . 
import views urlpatterns", "path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/',", "path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/',", "views urlpatterns = [ path('register/', views.register, name='register'), path('login/', views.userlogin, name='login'),", "views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/', views.applicantedit, name='editapplicantprofile'), path('recruiterdashboard/profile-edit/', views.recruiteredit,", "name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'),", "views.userlogin, name='login'), path('logout/', views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(),", "auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(),", 
"path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/', views.applicantedit, name='editapplicantprofile'), path('recruiterdashboard/profile-edit/', views.recruiteredit, name='editrecruiterprofile'), ]", "name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'),", "name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'),", "auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(),", "path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/',", "as auth_views from . 
import views urlpatterns = [ path('register/',", "name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/', views.applicantedit, name='editapplicantprofile'),", "path('logout/', views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/',", "name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),", "django.urls import path from django.contrib.auth import views as auth_views from", "name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/', views.applicantedit, name='editapplicantprofile'), path('recruiterdashboard/profile-edit/', views.recruiteredit, name='editrecruiterprofile'),", "import path from django.contrib.auth import views as auth_views from .", ". import views urlpatterns = [ path('register/', views.register, name='register'), path('login/',", "name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'),", "views as auth_views from . 
import views urlpatterns = [", "[ path('register/', views.register, name='register'), path('login/', views.userlogin, name='login'), path('logout/', views.userlogout, name='logout'),", "= [ path('register/', views.register, name='register'), path('login/', views.userlogin, name='login'), path('logout/', views.userlogout,", "path('login/', views.userlogin, name='login'), path('logout/', views.userlogout, name='logout'), path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/',", "auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'), path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard,", "path('register/', views.register, name='register'), path('login/', views.userlogin, name='login'), path('logout/', views.userlogout, name='logout'), path('password_change/',", "path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/',", "auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard, name='recruiterdashboard'), path('applicantdashboard/profile-edit/', views.applicantedit,", "from django.contrib.auth import views as auth_views from . 
import views", "auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'), path('reset/done/', auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'), path('applicantdashboard/', views.applicantdashboard, name='applicantdashboard'), path('recruiterdashboard/', views.recruiterdashboard,", "urlpatterns = [ path('register/', views.register, name='register'), path('login/', views.userlogin, name='login'), path('logout/',", "from django.urls import path from django.contrib.auth import views as auth_views", "path('password_change/', auth_views.PasswordChangeView.as_view(), name='password_change'), path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(), name='password_change_done'), path('password_reset/', auth_views.PasswordResetView.as_view(), name='password_reset'), path('password_reset/done/'," ]
[ "def generate_docker_compose(host, conf_path, extra_vars, extra_vars_file): template = 'template/docker/compose/docker-compose.yml' ansible_arg =", "_to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path, host, container, conf_path, relative_to) restart_container(host, container)", "def revert_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): local_path = get_local_path(host, container,", "/ Path(conf_path).relative_to(relative_to)) def get_local_path(host, container, conf_path, version=None): if version is", "def apply_proxy_conf(host, version=None, restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') apply_conf(host, 'proxy', str(conf_path),", "def setup_shibboleth_part(local_path, **params): if params is None or len(params) ==", "== conf_path) def _match_metainfo_by_remote_path(parent, remote_path): p = parent / META_YML", "len(diff) def upload_conf_file(src, host, container, conf_path, relative_to=CONF_RELATIVE): dest = generate_remote_path(container,", "yaml.safe_load(f) return ( isinstance(params, dict) and 'container' in params and", "relative_to=CONF_RELATIVE): dest = generate_remote_path(container, conf_path, relative_to) ansible_arg = f'mkdir -p", "-d --remove-orphans' args = ['ansible', host, '-a', ansible_arg] logger.info('Apply the", "'-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from {local_path} to", "container {container}') subprocess.check_call(['ansible', host, '-a', cmd]) def fetch_conf(host, container, conf_path,", "datetime import datetime from difflib import unified_diff from logging import", "relative_to)), 'version': list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w') as f:", "local_path = download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars = setup_shibboleth_part(local_path,", "= 'proxy' link = 
fetch_conf(host, container, str(conf_path), str(conf_path.parent)) version =", "return len(diff) def upload_conf_file(src, host, container, conf_path, relative_to=CONF_RELATIVE): dest =", "and 'container_path' in params and params['container'] == container and params['container_path']", "return (Path(MOODLE_DIR) / container / 'conf' / Path(conf_path).relative_to(relative_to)) def get_local_path(host,", "{dest} from {src} to {host_1}') def restart_container(host, container): cmd =", "extra_vars, extra_vars_file): template = 'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template} dest={conf_path.parent}/' env", "x in extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args, env=env, check=True) def update_docker_compose(host,", "'-m', 'fetch', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from", "= getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def generate_local_path(host, conf_path, version=None): ret =", "import basicConfig, getLogger, INFO import os from pathlib import Path", "ansible_arg = f'dest={remote_path} src={local_path} backup=yes' out = subprocess.check_output( ['ansible', host,", "from pathlib import Path import shutil import subprocess import sys", "in extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env, check=True) def update_proxy_conf(host, extra_vars={}):", "check=True) def update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path = MOODLE_DIR + '/docker-compose.yml'", "generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}): hostname = urlparse(moodle_url).netloc return", "[ 'ansible', host, '-m', 'template', '-c', 'local', '-a', ansible_arg] for", "= Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy' link = fetch_conf(host, container, str(conf_path),", "and params['remote_path'] == remote_path) 
def get_versions(host, *args, match=_match_metainfo): pdir =", "update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path = MOODLE_DIR + '/docker-compose.yml' local_path =", "out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from {host_1} to {dest}') return dest def", "f: print(f.read()) def edit_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf =", "create_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = create_conf_file(host, conf_path) make_backup(local_path,", "in extra_vars.items(): args.extend(['-e', f'{k}={v}']) for x in extra_vars_file: args.extend(['-e', f'@{str(x)}'])", "'container': container, 'container_path': conf_path, 'remote_path': str(generate_remote_path(container, conf_path, relative_to)), 'version': list(local_path.parts)[-2],", "def make_backup(conf, quiet=False): org = _to_backup(conf) if not quiet: logger.info(f'Copy", "ret /= Path(conf_path).name return ret def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return", "src = generate_remote_path(container, conf_path, relative_to) return download_file(host, src, conf_path) def", "ret = Path(WORKDIR).absolute() / host if version is None: ret", "= [ 'ansible', host, '-m', 'template', '-c', 'local', '-a', ansible_arg]", "= Path(remote_path).name dest = generate_local_path(host, conf_path) ansible_arg = f'src={remote_path} dest={dest}", "host, remote_path, match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path, conf_path=None): if conf_path is", "if version is None: ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /=", "**params) def generate_docker_compose(host, conf_path, extra_vars, extra_vars_file): template = 'template/docker/compose/docker-compose.yml' ansible_arg", "def get_local_path(host, container, conf_path, version=None): if version is None: version", "edit_local_conf(host, container, conf_path, 
relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path,", "dest.touch() return dest def _to_backup(conf): return conf.parent / (conf.name +", "local_path.rename(local_path.parent / (local_path.name + '.revert')) def show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE,", "MOODLE_DIR + '/docker-compose.yml' local_path = download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path)", "= '/etc' ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger =", "f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x, os.environ[x]) for x in ENV_INHERIT])", "f'dest={remote_path} src={local_path} backup=yes' out = subprocess.check_output( ['ansible', host, '-m', 'copy',", "pdir.glob('*') if x.is_dir() and match(x, *args)]) def find_latest_version(host, container, conf_path):", "= f'chdir={MOODLE_DIR} docker-compose restart {container}' logger.info(f'Restart container {container}') subprocess.check_call(['ansible', host,", "f: lines_a = f.readlines() with path_b.open() as f: lines_b =", "host, '-a', ansible_arg] logger.info('Apply the changes in docker-compose.yml.') subprocess.run(args=args, check=True)", "yaml.safe_dump(params, stream=f, default_flow_style=False) def generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0] p =", "conf_path, relative_to=CONF_RELATIVE, create=False): local_path = download_conf_file(host, container, conf_path, relative_to) make_backup(local_path)", "['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def", "ansible_arg = f'src={template} dest={conf_path.parent}/' env = dict([(x, os.environ[x]) for x", "f: yaml.safe_dump(params, f) return vars_path def setup_shibboleth_part(local_path, **params): if params", "sys.stdout.writelines(diff) return len(diff) def upload_conf_file(src, host, container, conf_path, 
relative_to=CONF_RELATIVE): dest", "make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE): params = { 'container': container, 'container_path':", "container, conf_path, version) show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path): with conf_path.open() as", "import HTML WORKDIR = 'edit' META_YML = '.vcp-meta.yml' MOODLE_DIR =", "'copy', '-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from", "show_local_conf_diff(host, container, conf_path, version) local_path = get_local_path(host, container, conf_path, version)", "'-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from {local_path}", "dest={conf_path.parent}/' env = dict([(x, os.environ[x]) for x in ENV_INHERIT]) args", "dest def download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE): src = generate_remote_path(container, conf_path,", "download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE): src = generate_remote_path(container, conf_path, relative_to) return", "restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') apply_conf(host, 'proxy', str(conf_path), str(conf_path.parent), version, restart)", "'container_name': 'shibboleth', 'hostname': hostname, }), } vars_path = conf_dir /", "quiet=True) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def apply_conf(host, container,", "return dest def _to_backup(conf): return conf.parent / (conf.name + '.orig')", "(conf.name + '.orig') def make_backup(conf, quiet=False): org = _to_backup(conf) if", "0: return save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host, conf_path,", "['ansible', host, '-m', 'template', '-c', 'local', '-a', ansible_arg] for k,", "make_metainfo(local_path, container, conf_path, 
relative_to) return generate_edit_link(local_path) def create_conf(host, container, conf_path,", "'edit' META_YML = '.vcp-meta.yml' MOODLE_DIR = '/opt/moodle' CONF_RELATIVE = '/etc'", "= { 'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports': ['443:443'],", "= parent / META_YML if not p.exists(): return False with", "generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) / container / 'conf' /", "host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from {host_1} to {dest}') return", "get_local_path(host, container, conf_path, version) return generate_edit_link(conf) def show_local_conf_diff(host, container, conf_path,", "make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path, extra_vars,", "lines_b = f.readlines() diff = list(unified_diff( lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name))", "conf_path, relative_to) return generate_edit_link(local_path) def apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None,", "+ '/docker-compose.yml' if version is None: version = find_latest_version_by_remote_path(host, remote_path)", "= download_conf_file(host, container, conf_path, relative_to) make_backup(local_path) make_metainfo(local_path, container, conf_path, relative_to)", "container, conf_path) return generate_local_path(host, conf_path, version) def _match_metainfo(parent, container, conf_path):", "list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f,", "find_latest_version_by_remote_path(host, remote_path): return get_versions( host, remote_path, match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path,", "save_shibboleth_part(conf_path): with 
conf_path.open() as f: data = yaml.safe_load(f) params =", "def generate_proxy_conf(host, conf_path, extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template}", "params and params['remote_path'] == remote_path) def get_versions(host, *args, match=_match_metainfo): pdir", "remote_path): p = parent / META_YML if not p.exists(): return", "version=None): if version is None: version = find_latest_version(host, container, conf_path)", "relative_to=CONF_RELATIVE, version=None): local_path = get_local_path(host, container, conf_path, version) backup_path =", "path_a.open() as f: lines_a = f.readlines() with path_b.open() as f:", "= generate_remote_path(container, conf_path, relative_to) ansible_arg = f'mkdir -p {dest.parent}' subprocess.run(", "ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from {host_1} to {dest}')", "= _to_backup(conf) if not quiet: logger.info(f'Copy {conf} {org}') shutil.copy2(conf, org)", "str(generate_remote_path(container, conf_path, relative_to)), 'version': list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w')", "container, conf_path, version) upload_conf_file(local_path, host, container, conf_path, relative_to) if restart:", "ansible_arg] for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env,", "'hostname': hostname, }), } vars_path = conf_dir / 'shibboleth.yml' with", "template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x,", "conf_path): p = parent / META_YML if not p.exists(): return", "= (Path(nb_conf['base_url']) / 'edit' / conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>')", "remote_path) local_path = ( Path(WORKDIR).absolute() / host / version /", "local_path = get_local_path(host, container, 
conf_path, version) upload_conf_file(local_path, host, container, conf_path,", "= {} if 'shibboleth' in data['services']: params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth'])", "host, '-a', ansible_arg]) ansible_arg = f'dest={dest} src={src} backup=yes' out =", "yaml.safe_load(f) return ( isinstance(params, dict) and 'remote_path' in params and", "subprocess.run( ['ansible', host, '-a', ansible_arg]) ansible_arg = f'dest={dest} src={src} backup=yes'", "dict([(x, os.environ[x]) for x in ENV_INHERIT]) args = [ 'ansible',", "version=None): local_path = get_local_path(host, container, conf_path, version) backup_path = _to_backup(local_path)", "for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) for x in", "host, container, conf_path, relative_to=CONF_RELATIVE): dest = generate_remote_path(container, conf_path, relative_to) ansible_arg", "remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path,", "return vars_path def setup_shibboleth_part(local_path, **params): if params is None or", "= MOODLE_DIR + '/docker-compose.yml' if version is None: version =", "def restart_container(host, container): cmd = f'chdir={MOODLE_DIR} docker-compose restart {container}' logger.info(f'Restart", "= yaml.safe_load(f) params = {} if 'shibboleth' in data['services']: params['shibboleth_container']", "dict) and 'container' in params and 'container_path' in params and", "= get_local_path(host, container, conf_path, version) return generate_edit_link(conf) def show_local_conf_diff(host, container,", "'-a', ansible_arg] for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) for", "from {local_path} to {host_1}') if not apply: return ansible_arg =", "lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff) def upload_conf_file(src, host, container,", "with 
vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path def setup_shibboleth_part(local_path,", "'ports': ['443:443'], 'volumes': shibboleth_volumes, 'container_name': 'shibboleth', 'hostname': hostname, }), }", "['ansible', host, '-a', ansible_arg]) ansible_arg = f'dest={dest} src={src} backup=yes' out", "local_path) def save_shibboleth_part(conf_path): with conf_path.open() as f: data = yaml.safe_load(f)", "remote_path, 'version': list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w') as f:", "== container and params['container_path'] == conf_path) def _match_metainfo_by_remote_path(parent, remote_path): p", "show_diff(path_a, path_b): lines_a = [] lines_b = [] with path_a.open()", "None: version = find_latest_version_by_remote_path(host, remote_path) local_path = ( Path(WORKDIR).absolute() /", "version=None, apply=False): remote_path = MOODLE_DIR + '/docker-compose.yml' if version is", "data['services']: params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth']) vars_path = conf_path.parent / 'extra_vars.yml'", "conf_path, extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env", "version) with conf.open() as f: print(f.read()) def edit_local_conf(host, container, conf_path,", "version) def _match_metainfo(parent, container, conf_path): p = parent / META_YML", "host return sorted([ x.name for x in pdir.glob('*') if x.is_dir()", "k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) for x in extra_vars_file:", "generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path) def append_shibboleth_container(host,", "conf_path, version) return link def apply_proxy_conf(host, version=None, restart=True): conf_path =", "not apply: return ansible_arg = f'chdir=/opt/moodle docker-compose up -d 
--remove-orphans'", "template = 'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template} dest={conf_path.parent}/' env = dict([(x,", "params = yaml.safe_load(f) return ( isinstance(params, dict) and 'container' in", "extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env =", "def append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}): hostname = urlparse(moodle_url).netloc return update_docker_compose(", "{container}') subprocess.check_call(['ansible', host, '-a', cmd]) def fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE,", "shibboleth_volumes.extend(volumes) params = { 'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True,", "version) show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path): with conf_path.open() as f: data", "'container_path': conf_path, 'remote_path': str(generate_remote_path(container, conf_path, relative_to)), 'version': list(local_path.parts)[-2], } with", "{ 'container': container, 'container_path': conf_path, 'remote_path': str(generate_remote_path(container, conf_path, relative_to)), 'version':", "IPython.core.display import HTML WORKDIR = 'edit' META_YML = '.vcp-meta.yml' MOODLE_DIR", "conf_path, version) upload_conf_file(local_path, host, container, conf_path, relative_to) if restart: restart_container(host,", "if not apply: return ansible_arg = f'chdir=/opt/moodle docker-compose up -d", "and 'remote_path' in params and params['remote_path'] == remote_path) def get_versions(host,", "= ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params = { 'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4',", "= list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url']) / 'edit' / 
conf.absolute().relative_to(nb_conf['notebook_dir'])) return", "container, str(conf_path)) local_path = generate_local_path(host, conf_path, version) generate_proxy_conf(host, local_path, extra_vars)", "import yaml from urllib.parse import urlparse from notebook import notebookapp", "'-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from {host_1} to", "{ 'remote_path': remote_path, 'version': list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w')", "extra_vars) show_local_conf_diff(host, container, conf_path, version) return link def apply_proxy_conf(host, version=None,", "p.exists(): return False with p.open() as f: params = yaml.safe_load(f)", "ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x, os.environ[x]) for x", "= list(unified_diff( lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff) def", "from notebook import notebookapp from IPython.core.display import HTML WORKDIR =", "make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def apply_conf(host, container, conf_path,", "/ META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def generate_edit_link(conf): nb_conf", "None: version = find_latest_version(host, container, conf_path) return generate_local_path(host, conf_path, version)", "match=_match_metainfo): pdir = Path(WORKDIR).absolute() / host return sorted([ x.name for", "version / 'docker-compose.yml') ansible_arg = f'dest={remote_path} src={local_path} backup=yes' out =", "relative_to) ansible_arg = f'mkdir -p {dest.parent}' subprocess.run( ['ansible', host, '-a',", "conf_path, relative_to=CONF_RELATIVE, version=None): local_path = get_local_path(host, container, conf_path, version) backup_path", "upload_docker_compose(host, version=None, apply=False): remote_path = MOODLE_DIR + 
'/docker-compose.yml' if version", "host, '-a', cmd]) def fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path", "args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env, check=True) def update_proxy_conf(host, extra_vars={}): conf_path =", "conf_path, version) return generate_edit_link(conf) def show_local_conf_diff(host, container, conf_path, version=None): local_path", "}), } vars_path = conf_dir / 'shibboleth.yml' with vars_path.open(mode='w') as", "conf_path): return get_versions(host, container, conf_path)[-1] def find_latest_version_by_remote_path(host, remote_path): return get_versions(", "as f: lines_a = f.readlines() with path_b.open() as f: lines_b", "for x in extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args, env=env, check=True) def", "MOODLE_DIR = '/opt/moodle' CONF_RELATIVE = '/etc' ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN',", "= yaml.safe_load(f) return ( isinstance(params, dict) and 'container' in params", "if x.is_dir() and match(x, *args)]) def find_latest_version(host, container, conf_path): return", "= [] with path_a.open() as f: lines_a = f.readlines() with", "WORKDIR = 'edit' META_YML = '.vcp-meta.yml' MOODLE_DIR = '/opt/moodle' CONF_RELATIVE", "len(params) == 0: return save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent, **params) def", "+ '.revert')) def show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf =", "['ansible', host, '-m', 'fetch', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading", "def make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE): params = { 'container': container,", "def generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url']) / 'edit'", "= ['ansible', host, '-m', 'template', '-c', 'local', '-a', ansible_arg] for", "container, conf_path)[-1] def 
find_latest_version_by_remote_path(host, remote_path): return get_versions( host, remote_path, match=_match_metainfo_by_remote_path)[-1]", "find_latest_version_by_remote_path(host, remote_path) local_path = ( Path(WORKDIR).absolute() / host / version", "generate_remote_path(container, conf_path, relative_to) ansible_arg = f'mkdir -p {dest.parent}' subprocess.run( ['ansible',", "subprocess.check_call(['ansible', host, '-a', cmd]) def fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False):", "conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return dest def _to_backup(conf): return conf.parent", "with path_a.open() as f: lines_a = f.readlines() with path_b.open() as", "show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path): with conf_path.open() as f: data =", "conf_path) def create_conf_file(host, conf_path): dest = generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True)", "= setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return", "extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env, check=True) def update_proxy_conf(host, extra_vars={}): conf_path", "f) return vars_path def setup_shibboleth_part(local_path, **params): if params is None", "with (local_path.parent / META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def", "local_path = create_conf_file(host, conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path, container, conf_path, relative_to)", "def download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE): src = generate_remote_path(container, conf_path, relative_to)", "{src} to {host_1}') def restart_container(host, container): cmd = f'chdir={MOODLE_DIR} docker-compose", "= get_local_path(host, container, conf_path, version) 
upload_conf_file(local_path, host, container, conf_path, relative_to)", "f: data = yaml.safe_load(f) params = {} if 'shibboleth' in", "'shibboleth', 'hostname': hostname, }), } vars_path = conf_dir / 'shibboleth.yml'", "def generate_local_path(host, conf_path, version=None): ret = Path(WORKDIR).absolute() / host if", "moodle_url, volumes=[], extra_vars={}): hostname = urlparse(moodle_url).netloc return update_docker_compose( host, extra_vars,", "docker-compose up -d --remove-orphans' args = ['ansible', host, '-a', ansible_arg]", "'proxy' link = fetch_conf(host, container, str(conf_path), str(conf_path.parent)) version = find_latest_version(host,", "make_backup(conf, quiet=False): org = _to_backup(conf) if not quiet: logger.info(f'Copy {conf}", "restart: restart_container(host, container) def revert_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): local_path", "relative_to) return generate_edit_link(local_path) def apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None, restart=True):", "= conf_dir / 'shibboleth.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f)", "for x in pdir.glob('*') if x.is_dir() and match(x, *args)]) def", "_to_backup(conf): return conf.parent / (conf.name + '.orig') def make_backup(conf, quiet=False):", "conf_path, relative_to=CONF_RELATIVE): dest = generate_remote_path(container, conf_path, relative_to) ansible_arg = f'mkdir", "'.revert')) def show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host,", "conf_path, relative_to=CONF_RELATIVE, create=False): local_path = create_conf_file(host, conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path,", "make_simple_metainfo(local_path, remote_path) shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars])", "isinstance(params, dict) and 'container' in params and 
'container_path' in params", "subprocess.run(args=args, check=True) def generate_proxy_conf(host, conf_path, extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg", "check=True) def update_proxy_conf(host, extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy'", "restart=True): diff = show_local_conf_diff(host, container, conf_path, version) local_path = get_local_path(host,", "'container' in params and 'container_path' in params and params['container'] ==", "generate_edit_link(local_path) def apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None, restart=True): diff =", "if not p.exists(): return False with p.open() as f: params", "'/docker-compose.yml' if version is None: version = find_latest_version_by_remote_path(host, remote_path) local_path", "save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host, conf_path, extra_vars, extra_vars_file):", "local_path = get_local_path(host, container, conf_path, version) show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path):", "args.extend(['-e', f'{k}={v}']) for x in extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args, env=env,", "'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports': ['443:443'], 'volumes': shibboleth_volumes, 'container_name': 'shibboleth',", "urllib.parse import urlparse from notebook import notebookapp from IPython.core.display import", "params = { 'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports':", "params = { 'remote_path': remote_path, 'version': list(local_path.parts)[-2], } with (local_path.parent", "local_path = generate_local_path(host, conf_path, version) generate_proxy_conf(host, local_path, extra_vars) show_local_conf_diff(host, container,", 
"conf_path, relative_to=CONF_RELATIVE): src = generate_remote_path(container, conf_path, relative_to) return download_file(host, src,", "if restart: restart_container(host, container) def revert_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None):", "conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path, version) with", "generate_local_path(host, conf_path, version) generate_proxy_conf(host, local_path, extra_vars) show_local_conf_diff(host, container, conf_path, version)", "subprocess.run(args=args, env=env, check=True) def update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path = MOODLE_DIR", "'-a', ansible_arg]) ansible_arg = f'dest={dest} src={src} backup=yes' out = subprocess.check_output(", "for x in ENV_INHERIT]) args = ['ansible', host, '-m', 'template',", "getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def generate_local_path(host, conf_path, version=None): ret = Path(WORKDIR).absolute()", "as f: params = yaml.safe_load(f) return ( isinstance(params, dict) and", "['443:443'], 'volumes': shibboleth_volumes, 'container_name': 'shibboleth', 'hostname': hostname, }), } vars_path", "get_local_path(host, container, conf_path, version=None): if version is None: version =", "flat=yes' out = subprocess.check_output( ['ansible', host, '-m', 'fetch', '-a', ansible_arg])", "container, conf_path, version=None): if version is None: version = find_latest_version(host,", "pathlib import Path import shutil import subprocess import sys import", "path_b.open() as f: lines_b = f.readlines() diff = list(unified_diff( lines_a,", "return link def apply_proxy_conf(host, version=None, restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') apply_conf(host,", "f: yaml.safe_dump(params, f) return vars_path def init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes", "container, conf_path, version=None): local_path = get_local_path(host, 
container, conf_path, version) show_diff(_to_backup(local_path),", "shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params = { 'shibboleth_container': yaml.safe_dump({ 'image':", "yaml.safe_dump(params, stream=f, default_flow_style=False) def make_simple_metainfo(local_path, remote_path): params = { 'remote_path':", "and 'container' in params and 'container_path' in params and params['container']", "params and params['container'] == container and params['container_path'] == conf_path) def", "container, conf_path, version) return generate_edit_link(conf) def show_local_conf_diff(host, container, conf_path, version=None):", "vars_path = conf_path.parent / 'extra_vars.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params,", "download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host,", "with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path def init_shibboleth_part(conf_dir,", "= out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from {host_1} to {dest}') return dest", "as f: lines_b = f.readlines() diff = list(unified_diff( lines_a, lines_b,", "if conf_path is None: conf_path = Path(remote_path).name dest = generate_local_path(host,", "conf_path, relative_to) return generate_edit_link(local_path) def create_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False):", "host, '-m', 'template', '-c', 'local', '-a', ansible_arg] for k, v", "f'dest={dest} src={src} backup=yes' out = subprocess.check_output( ['ansible', host, '-m', 'copy',", "print(f.read()) def edit_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host,", "return sorted([ x.name for x in pdir.glob('*') if x.is_dir() and", "conf_path, relative_to) make_backup(local_path) 
make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def", "'copy', '-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from", "ansible_arg = f'chdir=/opt/moodle docker-compose up -d --remove-orphans' args = ['ansible',", "conf_path, relative_to) ansible_arg = f'mkdir -p {dest.parent}' subprocess.run( ['ansible', host,", "/ container / 'conf' / Path(conf_path).relative_to(relative_to)) def get_local_path(host, container, conf_path,", "volumes=[], extra_vars={}): hostname = urlparse(moodle_url).netloc return update_docker_compose( host, extra_vars, shibboleth_params={'hostname':", "import os from pathlib import Path import shutil import subprocess", "update_docker_compose( host, extra_vars, shibboleth_params={'hostname': hostname, 'volumes': volumes}, ) def upload_docker_compose(host,", "shutil import subprocess import sys import yaml from urllib.parse import", "list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url']) / 'edit' / conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a", "urlparse(moodle_url).netloc return update_docker_compose( host, extra_vars, shibboleth_params={'hostname': hostname, 'volumes': volumes}, )", "hostname = urlparse(moodle_url).netloc return update_docker_compose( host, extra_vars, shibboleth_params={'hostname': hostname, 'volumes':", "local_path = ( Path(WORKDIR).absolute() / host / version / 'docker-compose.yml')", "= yaml.safe_dump( data['services']['shibboleth']) vars_path = conf_path.parent / 'extra_vars.yml' with vars_path.open(mode='w')", "make_backup(local_path) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def create_conf(host, container,", "return generate_edit_link(conf) def show_local_conf_diff(host, container, conf_path, version=None): local_path = get_local_path(host,", "= generate_local_path(host, conf_path, version) 
generate_proxy_conf(host, local_path, extra_vars) show_local_conf_diff(host, container, conf_path,", "in params and params['container'] == container and params['container_path'] == conf_path)", "remote_path = MOODLE_DIR + '/docker-compose.yml' local_path = download_file(host, remote_path) make_backup(local_path)", "'PATH', 'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def generate_local_path(host, conf_path,", "container, conf_path, relative_to) if restart: restart_container(host, container) def revert_conf(host, container,", "conf_path, relative_to) restart_container(host, container) local_path.rename(local_path.parent / (local_path.name + '.revert')) def", "apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None, restart=True): diff = show_local_conf_diff(host, container,", "return vars_path def init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes)", "from {src} to {host_1}') def restart_container(host, container): cmd = f'chdir={MOODLE_DIR}", "container) def revert_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): local_path = get_local_path(host,", "local_path = get_local_path(host, container, conf_path, version) backup_path = _to_backup(local_path) show_diff(local_path,", "'ansible', host, '-m', 'template', '-c', 'local', '-a', ansible_arg] for k,", "link def apply_proxy_conf(host, version=None, restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') apply_conf(host, 'proxy',", "(local_path.parent / META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def make_simple_metainfo(local_path,", "def update_proxy_conf(host, extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy' link", "if version is None: version = find_latest_version_by_remote_path(host, remote_path) local_path =", 
"docker-compose restart {container}' logger.info(f'Restart container {container}') subprocess.check_call(['ansible', host, '-a', cmd])", "get_local_path(host, container, conf_path, version) show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path): with conf_path.open()", "yaml.safe_dump(params, f) return vars_path def setup_shibboleth_part(local_path, **params): if params is", "= get_local_path(host, container, conf_path, version) with conf.open() as f: print(f.read())", "f: yaml.safe_dump(params, stream=f, default_flow_style=False) def generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0] p", "'-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from {src} to", "data = yaml.safe_load(f) params = {} if 'shibboleth' in data['services']:", "def make_simple_metainfo(local_path, remote_path): params = { 'remote_path': remote_path, 'version': list(local_path.parts)[-2],", "make_backup(local_path, quiet=True) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def apply_conf(host,", "container, conf_path, relative_to) return generate_edit_link(local_path) def apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE,", "with p.open() as f: params = yaml.safe_load(f) return ( isinstance(params,", "(local_path.name + '.revert')) def show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf", "version) return generate_edit_link(conf) def show_local_conf_diff(host, container, conf_path, version=None): local_path =", "return generate_edit_link(local_path) def apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None, restart=True): diff", "subprocess import sys import yaml from urllib.parse import urlparse from", "conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path, version) return", "{dest.parent}' subprocess.run( 
['ansible', host, '-a', ansible_arg]) ansible_arg = f'dest={dest} src={src}", "lines_a = [] lines_b = [] with path_a.open() as f:", "logging import basicConfig, getLogger, INFO import os from pathlib import", "with conf_path.open() as f: data = yaml.safe_load(f) params = {}", "conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy' link = fetch_conf(host, container,", "version is None: ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /= version", "host if version is None: ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret", "*args, match=_match_metainfo): pdir = Path(WORKDIR).absolute() / host return sorted([ x.name", "extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy' link = fetch_conf(host,", "datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /= version ret /= Path(conf_path).name return ret", "'-a', ansible_arg] for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args,", "( isinstance(params, dict) and 'remote_path' in params and params['remote_path'] ==", "not p.exists(): return False with p.open() as f: params =", "out = subprocess.check_output( ['ansible', host, '-m', 'copy', '-b', '-a', ansible_arg])", "in data['services']: params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth']) vars_path = conf_path.parent /", "create_conf_file(host, conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path)", "ret /= version ret /= Path(conf_path).name return ret def generate_remote_path(container,", "False with p.open() as f: params = yaml.safe_load(f) return (", "'.vcp-meta.yml' MOODLE_DIR = '/opt/moodle' CONF_RELATIVE = '/etc' ENV_INHERIT = ['VAULT_ADDR',", "container, conf_path): p = parent / META_YML if not p.exists():", "target=\"_blank\">{p.name}</a>') def show_diff(path_a, path_b): lines_a = [] lines_b = []", "= 
create_conf_file(host, conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path, container, conf_path, relative_to) return", "f.readlines() with path_b.open() as f: lines_b = f.readlines() diff =", "as f: yaml.safe_dump(params, f) return vars_path def init_shibboleth_part(conf_dir, hostname, volumes):", "as f: data = yaml.safe_load(f) params = {} if 'shibboleth'", "check=True) def generate_proxy_conf(host, conf_path, extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg =", "p.open() as f: params = yaml.safe_load(f) return ( isinstance(params, dict)", "container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = create_conf_file(host, conf_path) make_backup(local_path, quiet=True)", "f'@{str(x)}']) subprocess.run(args=args, env=env, check=True) def update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path =", "relative_to) return download_file(host, src, conf_path) def create_conf_file(host, conf_path): dest =", "conf_path) ansible_arg = f'src={remote_path} dest={dest} flat=yes' out = subprocess.check_output( ['ansible',", "= download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params)", "return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') def show_diff(path_a, path_b): lines_a = []", "container, conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path, version)", "def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) / container / 'conf'", "'/opt/moodle' CONF_RELATIVE = '/etc' ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE']", "org) def make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE): params = { 'container':", "= out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from {src} to {host_1}') def 
restart_container(host,", "'-m', 'template', '-c', 'local', '-a', ansible_arg] for k, v in", "'container_path' in params and params['container'] == container and params['container_path'] ==", "container, conf_path, relative_to) make_backup(local_path) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path)", "conf_path.open() as f: data = yaml.safe_load(f) params = {} if", "dict([(x, os.environ[x]) for x in ENV_INHERIT]) args = ['ansible', host,", "local_path, extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url,", "f: params = yaml.safe_load(f) return ( isinstance(params, dict) and 'remote_path'", "= _to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path, host, container, conf_path, relative_to) restart_container(host,", "create=False): local_path = download_conf_file(host, container, conf_path, relative_to) make_backup(local_path) make_metainfo(local_path, container,", "= ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__) basicConfig(level=INFO, format='%(message)s')", "return generate_edit_link(local_path) def create_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path =", "host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from {local_path} to {host_1}') if", "vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path def init_shibboleth_part(conf_dir, hostname,", "cmd]) def fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = download_conf_file(host,", "def show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container,", "{host_1}') if not apply: return ansible_arg = f'chdir=/opt/moodle docker-compose up", "args.extend(['-e', f'@{str(x)}']) 
subprocess.run(args=args, env=env, check=True) def update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path", "relative_to) restart_container(host, container) local_path.rename(local_path.parent / (local_path.name + '.revert')) def show_local_conf(host,", "remote_path, conf_path=None): if conf_path is None: conf_path = Path(remote_path).name dest", "conf_path, relative_to=CONF_RELATIVE): params = { 'container': container, 'container_path': conf_path, 'remote_path':", "logger.info(f'Uploading {dest} from {src} to {host_1}') def restart_container(host, container): cmd", "ansible_arg = f'src={remote_path} dest={dest} flat=yes' out = subprocess.check_output( ['ansible', host,", "['ansible', host, '-a', ansible_arg] logger.info('Apply the changes in docker-compose.yml.') subprocess.run(args=args,", "= find_latest_version(host, container, str(conf_path)) local_path = generate_local_path(host, conf_path, version) generate_proxy_conf(host,", "Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy' link = fetch_conf(host, container, str(conf_path), str(conf_path.parent))", "META_YML = '.vcp-meta.yml' MOODLE_DIR = '/opt/moodle' CONF_RELATIVE = '/etc' ENV_INHERIT", "def get_versions(host, *args, match=_match_metainfo): pdir = Path(WORKDIR).absolute() / host return", "for x in ENV_INHERIT]) args = [ 'ansible', host, '-m',", "'shibboleth' in data['services']: params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth']) vars_path = conf_path.parent", "not quiet: logger.info(f'Copy {conf} {org}') shutil.copy2(conf, org) def make_metainfo(local_path, container,", "version) generate_proxy_conf(host, local_path, extra_vars) show_local_conf_diff(host, container, conf_path, version) return link", "in extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args, env=env, check=True) def update_docker_compose(host, extra_vars={},", "pdir = Path(WORKDIR).absolute() / host return sorted([ x.name for x", "= 
f'chdir=/opt/moodle docker-compose up -d --remove-orphans' args = ['ansible', host,", "generate_docker_compose(host, conf_path, extra_vars, extra_vars_file): template = 'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template}", "{remote_path} from {local_path} to {host_1}') if not apply: return ansible_arg", "get_local_path(host, container, conf_path, version) backup_path = _to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path,", "conf.parent / (conf.name + '.orig') def make_backup(conf, quiet=False): org =", "show_diff(local_path, backup_path) upload_conf_file(backup_path, host, container, conf_path, relative_to) restart_container(host, container) local_path.rename(local_path.parent", "return dest def download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE): src = generate_remote_path(container,", "logger.info('Apply the changes in docker-compose.yml.') subprocess.run(args=args, check=True) def generate_proxy_conf(host, conf_path,", "conf_path, 'remote_path': str(generate_remote_path(container, conf_path, relative_to)), 'version': list(local_path.parts)[-2], } with (local_path.parent", "'volumes': volumes}, ) def upload_docker_compose(host, version=None, apply=False): remote_path = MOODLE_DIR", "difflib import unified_diff from logging import basicConfig, getLogger, INFO import", "subprocess.check_output( ['ansible', host, '-m', 'fetch', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0]", "return ret def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) / container", "/ conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') def show_diff(path_a, path_b): lines_a", "is None: conf_path = Path(remote_path).name dest = generate_local_path(host, conf_path) ansible_arg", "fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff) def 
upload_conf_file(src, host, container, conf_path,", "host, '-m', 'fetch', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path}", "exist_ok=True) dest.touch() return dest def _to_backup(conf): return conf.parent / (conf.name", "/ 'docker-compose.yml') ansible_arg = f'dest={remote_path} src={local_path} backup=yes' out = subprocess.check_output(", "= f'src={remote_path} dest={dest} flat=yes' out = subprocess.check_output( ['ansible', host, '-m',", "restart_container(host, container) local_path.rename(local_path.parent / (local_path.name + '.revert')) def show_local_conf(host, container,", "**params): if params is None or len(params) == 0: return", "= dict([(x, os.environ[x]) for x in ENV_INHERIT]) args = [", "relative_to=CONF_RELATIVE, create=False): local_path = download_conf_file(host, container, conf_path, relative_to) make_backup(local_path) make_metainfo(local_path,", "version=None): conf = get_local_path(host, container, conf_path, version) return generate_edit_link(conf) def", "is None: version = find_latest_version(host, container, conf_path) return generate_local_path(host, conf_path,", "conf.open() as f: print(f.read()) def edit_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None):", "href={p} target=\"_blank\">{p.name}</a>') def show_diff(path_a, path_b): lines_a = [] lines_b =", "import datetime from difflib import unified_diff from logging import basicConfig,", "= f.readlines() diff = list(unified_diff( lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff)", "Path(WORKDIR).absolute() / host / version / 'docker-compose.yml') ansible_arg = f'dest={remote_path}", "def find_latest_version_by_remote_path(host, remote_path): return get_versions( host, remote_path, match=_match_metainfo_by_remote_path)[-1] def download_file(host,", "default_flow_style=False) def make_simple_metainfo(local_path, remote_path): params = { 'remote_path': 
remote_path, 'version':", "append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}): hostname = urlparse(moodle_url).netloc return update_docker_compose( host,", "from urllib.parse import urlparse from notebook import notebookapp from IPython.core.display", "generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return dest def _to_backup(conf): return", "the changes in docker-compose.yml.') subprocess.run(args=args, check=True) def generate_proxy_conf(host, conf_path, extra_vars):", "def download_file(host, remote_path, conf_path=None): if conf_path is None: conf_path =", "conf_path, relative_to) return download_file(host, src, conf_path) def create_conf_file(host, conf_path): dest", "if params is None or len(params) == 0: return save_shibboleth_part(local_path)", "backup_path) upload_conf_file(backup_path, host, container, conf_path, relative_to) restart_container(host, container) local_path.rename(local_path.parent /", "download_file(host, src, conf_path) def create_conf_file(host, conf_path): dest = generate_local_path(host, conf_path)", "extra_vars, shibboleth_params={'hostname': hostname, 'volumes': volumes}, ) def upload_docker_compose(host, version=None, apply=False):", "conf_path, extra_vars, extra_vars_file): template = 'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template} dest={conf_path.parent}/'", "def show_diff(path_a, path_b): lines_a = [] lines_b = [] with", "[] lines_b = [] with path_a.open() as f: lines_a =", "import urlparse from notebook import notebookapp from IPython.core.display import HTML", "remote_path, match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path, conf_path=None): if conf_path is None:", "list(unified_diff( lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff) def upload_conf_file(src,", "remote_path): return get_versions( host, remote_path, 
match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path, conf_path=None):", "version=None): local_path = get_local_path(host, container, conf_path, version) show_diff(_to_backup(local_path), local_path) def", "conf_path, version) def _match_metainfo(parent, container, conf_path): p = parent /", "container, conf_path): return get_versions(host, container, conf_path)[-1] def find_latest_version_by_remote_path(host, remote_path): return", "version = find_latest_version(host, container, str(conf_path)) local_path = generate_local_path(host, conf_path, version)", "container, 'container_path': conf_path, 'remote_path': str(generate_remote_path(container, conf_path, relative_to)), 'version': list(local_path.parts)[-2], }", "from difflib import unified_diff from logging import basicConfig, getLogger, INFO", "*args)]) def find_latest_version(host, container, conf_path): return get_versions(host, container, conf_path)[-1] def", "['ansible', host, '-m', 'copy', '-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0]", "== remote_path) def get_versions(host, *args, match=_match_metainfo): pdir = Path(WORKDIR).absolute() /", "return update_docker_compose( host, extra_vars, shibboleth_params={'hostname': hostname, 'volumes': volumes}, ) def", "= find_latest_version_by_remote_path(host, remote_path) local_path = ( Path(WORKDIR).absolute() / host /", "= Path(WORKDIR).absolute() / host return sorted([ x.name for x in", "relative_to) make_backup(local_path) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def create_conf(host,", "generate_local_path(host, conf_path, version=None): ret = Path(WORKDIR).absolute() / host if version", "import Path import shutil import subprocess import sys import yaml", "f: params = yaml.safe_load(f) return ( isinstance(params, dict) and 'container'", "= f'dest={dest} src={src} backup=yes' out = subprocess.check_output( ['ansible', host, '-m',", "else: 
return init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host, conf_path, extra_vars, extra_vars_file): template", "host, container, conf_path, relative_to) restart_container(host, container) local_path.rename(local_path.parent / (local_path.name +", "( Path(WORKDIR).absolute() / host / version / 'docker-compose.yml') ansible_arg =", "Path(conf_path).relative_to(relative_to)) def get_local_path(host, container, conf_path, version=None): if version is None:", "local_path, extra_vars) show_local_conf_diff(host, container, conf_path, version) return link def apply_proxy_conf(host,", "return init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host, conf_path, extra_vars, extra_vars_file): template =", "conf_path, version) generate_proxy_conf(host, local_path, extra_vars) show_local_conf_diff(host, container, conf_path, version) return", "yaml.safe_dump( data['services']['shibboleth']) vars_path = conf_path.parent / 'extra_vars.yml' with vars_path.open(mode='w') as", "setup_shibboleth_part(local_path, **params): if params is None or len(params) == 0:", "--remove-orphans' args = ['ansible', host, '-a', ansible_arg] logger.info('Apply the changes", "x in ENV_INHERIT]) args = [ 'ansible', host, '-m', 'template',", "f'{k}={v}']) for x in extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args, env=env, check=True)", "setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path)", "tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff) def upload_conf_file(src, host, container, conf_path, relative_to=CONF_RELATIVE):", "is None or len(params) == 0: return save_shibboleth_part(local_path) else: return", "'privileged': True, 'ports': ['443:443'], 'volumes': shibboleth_volumes, 'container_name': 'shibboleth', 'hostname': hostname,", "logger = 
getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def generate_local_path(host, conf_path, version=None): ret", "conf_path) return generate_local_path(host, conf_path, version) def _match_metainfo(parent, container, conf_path): p", "params = {} if 'shibboleth' in data['services']: params['shibboleth_container'] = yaml.safe_dump(", "/ host / version / 'docker-compose.yml') ansible_arg = f'dest={remote_path} src={local_path}", "version=None, restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') apply_conf(host, 'proxy', str(conf_path), str(conf_path.parent), version,", "= Path(WORKDIR).absolute() / host if version is None: ret /=", "= find_latest_version(host, container, conf_path) return generate_local_path(host, conf_path, version) def _match_metainfo(parent,", "restart_container(host, container) def revert_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): local_path =", "+ '/docker-compose.yml' local_path = download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars", "changes in docker-compose.yml.') subprocess.run(args=args, check=True) def generate_proxy_conf(host, conf_path, extra_vars): template", "in params and 'container_path' in params and params['container'] == container", "conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) / container / 'conf' / Path(conf_path).relative_to(relative_to))", "and params['container_path'] == conf_path) def _match_metainfo_by_remote_path(parent, remote_path): p = parent", "in docker-compose.yml.') subprocess.run(args=args, check=True) def generate_proxy_conf(host, conf_path, extra_vars): template =", "basicConfig(level=INFO, format='%(message)s') def generate_local_path(host, conf_path, version=None): ret = Path(WORKDIR).absolute() /", "vars_path def init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) 
params", "container, conf_path, relative_to) return generate_edit_link(local_path) def create_conf(host, container, conf_path, relative_to=CONF_RELATIVE,", "shibboleth_params={}): remote_path = MOODLE_DIR + '/docker-compose.yml' local_path = download_file(host, remote_path)", "'-m', 'copy', '-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path}", "to {host_1}') def restart_container(host, container): cmd = f'chdir={MOODLE_DIR} docker-compose restart", "version) local_path = get_local_path(host, container, conf_path, version) upload_conf_file(local_path, host, container,", "in ENV_INHERIT]) args = [ 'ansible', host, '-m', 'template', '-c',", "env=env, check=True) def update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path = MOODLE_DIR +", "return generate_local_path(host, conf_path, version) def _match_metainfo(parent, container, conf_path): p =", "def _to_backup(conf): return conf.parent / (conf.name + '.orig') def make_backup(conf,", "if not quiet: logger.info(f'Copy {conf} {org}') shutil.copy2(conf, org) def make_metainfo(local_path,", "conf_path) def _match_metainfo_by_remote_path(parent, remote_path): p = parent / META_YML if", "conf_path, relative_to) if restart: restart_container(host, container) def revert_conf(host, container, conf_path,", "x in pdir.glob('*') if x.is_dir() and match(x, *args)]) def find_latest_version(host,", "as f: print(f.read()) def edit_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf", "unified_diff from logging import basicConfig, getLogger, INFO import os from", "if version is None: version = find_latest_version(host, container, conf_path) return", "def _match_metainfo(parent, container, conf_path): p = parent / META_YML if", "def upload_docker_compose(host, version=None, apply=False): remote_path = MOODLE_DIR + '/docker-compose.yml' if", "params = { 'container': container, 'container_path': conf_path, 
'remote_path': str(generate_remote_path(container, conf_path,", "def edit_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container,", "get_local_path(host, container, conf_path, version) with conf.open() as f: print(f.read()) def", "diff = list(unified_diff( lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff)", "_to_backup(conf) if not quiet: logger.info(f'Copy {conf} {org}') shutil.copy2(conf, org) def", "return get_versions( host, remote_path, match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path, conf_path=None): if", "restart {container}' logger.info(f'Restart container {container}') subprocess.check_call(['ansible', host, '-a', cmd]) def", "container, conf_path, version) local_path = get_local_path(host, container, conf_path, version) upload_conf_file(local_path,", "conf_path, version) local_path = get_local_path(host, container, conf_path, version) upload_conf_file(local_path, host,", "conf_path, version=None): local_path = get_local_path(host, container, conf_path, version) show_diff(_to_backup(local_path), local_path)", "ENV_INHERIT]) args = [ 'ansible', host, '-m', 'template', '-c', 'local',", "container): cmd = f'chdir={MOODLE_DIR} docker-compose restart {container}' logger.info(f'Restart container {container}')", "vars_path def setup_shibboleth_part(local_path, **params): if params is None or len(params)", "from datetime import datetime from difflib import unified_diff from logging", "in params and params['remote_path'] == remote_path) def get_versions(host, *args, match=_match_metainfo):", "generate_remote_path(container, conf_path, relative_to) return download_file(host, src, conf_path) def create_conf_file(host, conf_path):", "remote_path) shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars]) 
show_diff(_to_backup(local_path),", "conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path, container, conf_path, relative_to) return generate_edit_link(local_path) def", "match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path, conf_path=None): if conf_path is None: conf_path", "show_local_conf_diff(host, container, conf_path, version=None): local_path = get_local_path(host, container, conf_path, version)", "'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x, os.environ[x]) for", "conf_path, relative_to=CONF_RELATIVE, version=None, restart=True): diff = show_local_conf_diff(host, container, conf_path, version)", "with conf.open() as f: print(f.read()) def edit_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE,", "_match_metainfo(parent, container, conf_path): p = parent / META_YML if not", "upload_conf_file(src, host, container, conf_path, relative_to=CONF_RELATIVE): dest = generate_remote_path(container, conf_path, relative_to)", "f: lines_b = f.readlines() diff = list(unified_diff( lines_a, lines_b, fromfile=path_a.name,", "notebook import notebookapp from IPython.core.display import HTML WORKDIR = 'edit'", "/= Path(conf_path).name return ret def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR)", "CONF_RELATIVE = '/etc' ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger", "update_proxy_conf(host, extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container = 'proxy' link =", "version) return link def apply_proxy_conf(host, version=None, restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf')", "= '/opt/moodle' CONF_RELATIVE = '/etc' ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH',", "'extra_vars.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path def", 
"extra_vars={}, shibboleth_params={}): remote_path = MOODLE_DIR + '/docker-compose.yml' local_path = download_file(host,", "conf_path = Path(remote_path).name dest = generate_local_path(host, conf_path) ansible_arg = f'src={remote_path}", "'remote_path': remote_path, 'version': list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w') as", "'shibboleth.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path def", "generate_local_path(host, conf_path) ansible_arg = f'src={remote_path} dest={dest} flat=yes' out = subprocess.check_output(", ") def upload_docker_compose(host, version=None, apply=False): remote_path = MOODLE_DIR + '/docker-compose.yml'", "apply_proxy_conf(host, version=None, restart=True): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') apply_conf(host, 'proxy', str(conf_path), str(conf_path.parent),", "MOODLE_DIR + '/docker-compose.yml' if version is None: version = find_latest_version_by_remote_path(host,", "out = subprocess.check_output( ['ansible', host, '-m', 'fetch', '-a', ansible_arg]) host_1", "remote_path): params = { 'remote_path': remote_path, 'version': list(local_path.parts)[-2], } with", "= [] lines_b = [] with path_a.open() as f: lines_a", "init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params = {", "for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env, check=True)", "version is None: version = find_latest_version_by_remote_path(host, remote_path) local_path = (", "notebookapp from IPython.core.display import HTML WORKDIR = 'edit' META_YML =", "= f'src={template} dest={conf_path.parent}/' env = dict([(x, os.environ[x]) for x in", "find_latest_version(host, container, str(conf_path)) local_path = generate_local_path(host, conf_path, version) generate_proxy_conf(host, local_path,", "logger.info(f'Uploading {remote_path} from {local_path} to 
{host_1}') if not apply: return", "Path(WORKDIR).absolute() / host if version is None: ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\")", "None: conf_path = Path(remote_path).name dest = generate_local_path(host, conf_path) ansible_arg =", "Path(remote_path).name dest = generate_local_path(host, conf_path) ansible_arg = f'src={remote_path} dest={dest} flat=yes'", "import shutil import subprocess import sys import yaml from urllib.parse", "container / 'conf' / Path(conf_path).relative_to(relative_to)) def get_local_path(host, container, conf_path, version=None):", "org = _to_backup(conf) if not quiet: logger.info(f'Copy {conf} {org}') shutil.copy2(conf,", "conf_path.parent / 'extra_vars.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return", "import unified_diff from logging import basicConfig, getLogger, INFO import os", "link = fetch_conf(host, container, str(conf_path), str(conf_path.parent)) version = find_latest_version(host, container,", "import notebookapp from IPython.core.display import HTML WORKDIR = 'edit' META_YML", "'conf' / Path(conf_path).relative_to(relative_to)) def get_local_path(host, container, conf_path, version=None): if version", "'-m', 'copy', '-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest}", "params['remote_path'] == remote_path) def get_versions(host, *args, match=_match_metainfo): pdir = Path(WORKDIR).absolute()", "container, conf_path, relative_to=CONF_RELATIVE): dest = generate_remote_path(container, conf_path, relative_to) ansible_arg =", "generate_edit_link(conf) def show_local_conf_diff(host, container, conf_path, version=None): local_path = get_local_path(host, container,", "return get_versions(host, container, conf_path)[-1] def find_latest_version_by_remote_path(host, remote_path): return get_versions( host,", "{conf} {org}') shutil.copy2(conf, org) def make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE): params", 
"hostname, }), } vars_path = conf_dir / 'shibboleth.yml' with vars_path.open(mode='w')", "env = dict([(x, os.environ[x]) for x in ENV_INHERIT]) args =", "get_versions( host, remote_path, match=_match_metainfo_by_remote_path)[-1] def download_file(host, remote_path, conf_path=None): if conf_path", "'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports': ['443:443'], 'volumes': shibboleth_volumes, 'container_name': 'shibboleth', 'hostname':", "'volumes': shibboleth_volumes, 'container_name': 'shibboleth', 'hostname': hostname, }), } vars_path =", "= { 'container': container, 'container_path': conf_path, 'remote_path': str(generate_remote_path(container, conf_path, relative_to)),", "generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url']) / 'edit' /", "vars_path = conf_dir / 'shibboleth.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params,", "apply=False): remote_path = MOODLE_DIR + '/docker-compose.yml' if version is None:", "fetch_conf(host, container, str(conf_path), str(conf_path.parent)) version = find_latest_version(host, container, str(conf_path)) local_path", "in ENV_INHERIT]) args = ['ansible', host, '-m', 'template', '-c', 'local',", "env=env, check=True) def update_proxy_conf(host, extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container =", "generate_proxy_conf(host, local_path, extra_vars) show_local_conf_diff(host, container, conf_path, version) return link def", "[] with path_a.open() as f: lines_a = f.readlines() with path_b.open()", "conf_path is None: conf_path = Path(remote_path).name dest = generate_local_path(host, conf_path)", "= ( Path(WORKDIR).absolute() / host / version / 'docker-compose.yml') ansible_arg", "f'chdir=/opt/moodle docker-compose up -d --remove-orphans' args = ['ansible', host, '-a',", "= fetch_conf(host, container, str(conf_path), str(conf_path.parent)) version = find_latest_version(host, container, 
str(conf_path))", "as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def make_simple_metainfo(local_path, remote_path): params =", "is None: version = find_latest_version_by_remote_path(host, remote_path) local_path = ( Path(WORKDIR).absolute()", "backup_path = _to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path, host, container, conf_path, relative_to)", "/ host return sorted([ x.name for x in pdir.glob('*') if", "src={local_path} backup=yes' out = subprocess.check_output( ['ansible', host, '-m', 'copy', '-b',", "container) local_path.rename(local_path.parent / (local_path.name + '.revert')) def show_local_conf(host, container, conf_path,", "lines_a = f.readlines() with path_b.open() as f: lines_b = f.readlines()", "container = 'proxy' link = fetch_conf(host, container, str(conf_path), str(conf_path.parent)) version", "def create_conf_file(host, conf_path): dest = generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch()", "show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path,", "+ '.orig') def make_backup(conf, quiet=False): org = _to_backup(conf) if not", "default_flow_style=False) def generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url']) /", "stream=f, default_flow_style=False) def make_simple_metainfo(local_path, remote_path): params = { 'remote_path': remote_path,", "version=None, restart=True): diff = show_local_conf_diff(host, container, conf_path, version) local_path =", "f: yaml.safe_dump(params, stream=f, default_flow_style=False) def make_simple_metainfo(local_path, remote_path): params = {", "f'src={template} dest={conf_path.parent}/' env = dict([(x, os.environ[x]) for x in ENV_INHERIT])", "relative_to=CONF_RELATIVE): params = { 'container': container, 'container_path': conf_path, 'remote_path': 
str(generate_remote_path(container,", "version ret /= Path(conf_path).name return ret def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE):", "logger.info(f'Downloading {remote_path} from {host_1} to {dest}') return dest def download_conf_file(host,", "= ['ansible', host, '-a', ansible_arg] logger.info('Apply the changes in docker-compose.yml.')", "get_versions(host, *args, match=_match_metainfo): pdir = Path(WORKDIR).absolute() / host return sorted([", "src={src} backup=yes' out = subprocess.check_output( ['ansible', host, '-m', 'copy', '-b',", "if 'shibboleth' in data['services']: params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth']) vars_path =", "up -d --remove-orphans' args = ['ansible', host, '-a', ansible_arg] logger.info('Apply", "hostname, volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params = { 'shibboleth_container':", "fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = download_conf_file(host, container, conf_path,", "relative_to=CONF_RELATIVE, version=None, restart=True): diff = show_local_conf_diff(host, container, conf_path, version) local_path", "container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = download_conf_file(host, container, conf_path, relative_to)", "{remote_path} from {host_1} to {dest}') return dest def download_conf_file(host, container,", "dest={dest} flat=yes' out = subprocess.check_output( ['ansible', host, '-m', 'fetch', '-a',", "= f'dest={remote_path} src={local_path} backup=yes' out = subprocess.check_output( ['ansible', host, '-m',", "version is None: version = find_latest_version(host, container, conf_path) return generate_local_path(host,", "container, conf_path, relative_to=CONF_RELATIVE): params = { 'container': container, 'container_path': conf_path,", "p = (Path(nb_conf['base_url']) / 'edit' / conf.absolute().relative_to(nb_conf['notebook_dir'])) return 
HTML(f'<a href={p}", "volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params = { 'shibboleth_container': yaml.safe_dump({", "'/docker-compose.yml' local_path = download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path, remote_path) shibboleth_vars =", "return False with p.open() as f: params = yaml.safe_load(f) return", "'template', '-c', 'local', '-a', ansible_arg] for k, v in extra_vars.items():", "'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def generate_local_path(host,", "x in ENV_INHERIT]) args = ['ansible', host, '-m', 'template', '-c',", "getLogger, INFO import os from pathlib import Path import shutil", "Path(WORKDIR).absolute() / host return sorted([ x.name for x in pdir.glob('*')", "local_path) return generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}): hostname =", "conf_path): dest = generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return dest", "-p {dest.parent}' subprocess.run( ['ansible', host, '-a', ansible_arg]) ansible_arg = f'dest={dest}", "src, conf_path) def create_conf_file(host, conf_path): dest = generate_local_path(host, conf_path) dest.parent.mkdir(parents=True,", "conf_dir / 'shibboleth.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return", "( isinstance(params, dict) and 'container' in params and 'container_path' in", "args = ['ansible', host, '-m', 'template', '-c', 'local', '-a', ansible_arg]", "'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports': ['443:443'], 'volumes': shibboleth_volumes,", "{host_1}') def restart_container(host, container): cmd = f'chdir={MOODLE_DIR} docker-compose restart {container}'", "lines_b = [] with path_a.open() as f: lines_a = f.readlines()", "/ 
'extra_vars.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path", "os.environ[x]) for x in ENV_INHERIT]) args = ['ansible', host, '-m',", "remote_path = MOODLE_DIR + '/docker-compose.yml' if version is None: version", "def _match_metainfo_by_remote_path(parent, remote_path): p = parent / META_YML if not", "return generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}): hostname = urlparse(moodle_url).netloc", "'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__) basicConfig(level=INFO, format='%(message)s') def generate_local_path(host, conf_path, version=None):", "params['container'] == container and params['container_path'] == conf_path) def _match_metainfo_by_remote_path(parent, remote_path):", "dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return dest def _to_backup(conf): return conf.parent /", "str(conf_path), str(conf_path.parent)) version = find_latest_version(host, container, str(conf_path)) local_path = generate_local_path(host,", "/ META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def make_simple_metainfo(local_path, remote_path):", "host, '-m', 'copy', '-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading", "= f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x, os.environ[x]) for x in", "match(x, *args)]) def find_latest_version(host, container, conf_path): return get_versions(host, container, conf_path)[-1]", "format='%(message)s') def generate_local_path(host, conf_path, version=None): ret = Path(WORKDIR).absolute() / host", "conf = get_local_path(host, container, conf_path, version) return generate_edit_link(conf) def show_local_conf_diff(host,", "get_versions(host, container, conf_path)[-1] def find_latest_version_by_remote_path(host, remote_path): return get_versions( host, remote_path,", "download_file(host, remote_path, 
conf_path=None): if conf_path is None: conf_path = Path(remote_path).name", "return download_file(host, src, conf_path) def create_conf_file(host, conf_path): dest = generate_local_path(host,", "ansible_arg = f'mkdir -p {dest.parent}' subprocess.run( ['ansible', host, '-a', ansible_arg])", "/= version ret /= Path(conf_path).name return ret def generate_remote_path(container, conf_path,", "f'chdir={MOODLE_DIR} docker-compose restart {container}' logger.info(f'Restart container {container}') subprocess.check_call(['ansible', host, '-a',", "yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports': ['443:443'], 'volumes': shibboleth_volumes, 'container_name':", "['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params = { 'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged':", "return ( isinstance(params, dict) and 'container' in params and 'container_path'", "out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from {src} to {host_1}') def restart_container(host, container):", "'remote_path' in params and params['remote_path'] == remote_path) def get_versions(host, *args,", "quiet=False): org = _to_backup(conf) if not quiet: logger.info(f'Copy {conf} {org}')", "basicConfig, getLogger, INFO import os from pathlib import Path import", "= { 'remote_path': remote_path, 'version': list(local_path.parts)[-2], } with (local_path.parent /", "(local_path.parent / META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def generate_edit_link(conf):", "/ 'shibboleth.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path", "container, str(conf_path), str(conf_path.parent)) version = find_latest_version(host, container, str(conf_path)) local_path =", "urlparse from notebook import notebookapp from IPython.core.display import HTML WORKDIR", "{host_1} to 
{dest}') return dest def download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE):", "import subprocess import sys import yaml from urllib.parse import urlparse", "host, container, conf_path, relative_to) if restart: restart_container(host, container) def revert_conf(host,", "META_YML if not p.exists(): return False with p.open() as f:", "relative_to=CONF_RELATIVE, create=False): local_path = create_conf_file(host, conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path, container,", "**shibboleth_params) generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path) def", "def show_local_conf_diff(host, container, conf_path, version=None): local_path = get_local_path(host, container, conf_path,", "dest = generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return dest def", "version) upload_conf_file(local_path, host, container, conf_path, relative_to) if restart: restart_container(host, container)", "in pdir.glob('*') if x.is_dir() and match(x, *args)]) def find_latest_version(host, container,", "data['services']['shibboleth']) vars_path = conf_path.parent / 'extra_vars.yml' with vars_path.open(mode='w') as f:", "extra_vars_file): template = 'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template} dest={conf_path.parent}/' env =", "extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url, volumes=[],", "sorted([ x.name for x in pdir.glob('*') if x.is_dir() and match(x,", "relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path, version) with conf.open()", "True, 'ports': ['443:443'], 'volumes': shibboleth_volumes, 'container_name': 'shibboleth', 'hostname': hostname, }),", "volumes}, ) def upload_docker_compose(host, version=None, 
apply=False): remote_path = MOODLE_DIR +", "{local_path} to {host_1}') if not apply: return ansible_arg = f'chdir=/opt/moodle", "out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from {local_path} to {host_1}') if not apply:", "hostname, 'volumes': volumes}, ) def upload_docker_compose(host, version=None, apply=False): remote_path =", "ansible_arg = f'dest={dest} src={src} backup=yes' out = subprocess.check_output( ['ansible', host,", "= generate_remote_path(container, conf_path, relative_to) return download_file(host, src, conf_path) def create_conf_file(host,", "_match_metainfo_by_remote_path(parent, remote_path): p = parent / META_YML if not p.exists():", "= generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return dest def _to_backup(conf):", "ENV_INHERIT]) args = ['ansible', host, '-m', 'template', '-c', 'local', '-a',", "parent / META_YML if not p.exists(): return False with p.open()", "conf_path, version=None): if version is None: version = find_latest_version(host, container,", "'-a', ansible_arg] logger.info('Apply the changes in docker-compose.yml.') subprocess.run(args=args, check=True) def", "x.name for x in pdir.glob('*') if x.is_dir() and match(x, *args)])", "def init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup'] shibboleth_volumes.extend(volumes) params =", "sys import yaml from urllib.parse import urlparse from notebook import", "f.readlines() diff = list(unified_diff( lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return", "[shibboleth_vars]) show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}):", "= 'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x, os.environ[x])", 
"params['container_path'] == conf_path) def _match_metainfo_by_remote_path(parent, remote_path): p = parent /", "} vars_path = conf_dir / 'shibboleth.yml' with vars_path.open(mode='w') as f:", "= yaml.safe_load(f) return ( isinstance(params, dict) and 'remote_path' in params", "relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) / container / 'conf' / Path(conf_path).relative_to(relative_to)) def", "version) backup_path = _to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path, host, container, conf_path,", "datetime from difflib import unified_diff from logging import basicConfig, getLogger,", "nb_conf = list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url']) / 'edit' / conf.absolute().relative_to(nb_conf['notebook_dir']))", "Path(conf_path).name return ret def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) /", "Path import shutil import subprocess import sys import yaml from", "dict) and 'remote_path' in params and params['remote_path'] == remote_path) def", "conf_path)[-1] def find_latest_version_by_remote_path(host, remote_path): return get_versions( host, remote_path, match=_match_metainfo_by_remote_path)[-1] def", "/ (conf.name + '.orig') def make_backup(conf, quiet=False): org = _to_backup(conf)", "else: ret /= version ret /= Path(conf_path).name return ret def", "p = parent / META_YML if not p.exists(): return False", "/= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /= version ret /= Path(conf_path).name return", "relative_to=CONF_RELATIVE): src = generate_remote_path(container, conf_path, relative_to) return download_file(host, src, conf_path)", "ret def generate_remote_path(container, conf_path, relative_to=CONF_RELATIVE): return (Path(MOODLE_DIR) / container /", "META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def generate_edit_link(conf): nb_conf =", "HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') 
def show_diff(path_a, path_b): lines_a = [] lines_b", "logger.info(f'Restart container {container}') subprocess.check_call(['ansible', host, '-a', cmd]) def fetch_conf(host, container,", "def save_shibboleth_part(conf_path): with conf_path.open() as f: data = yaml.safe_load(f) params", "from {host_1} to {dest}') return dest def download_conf_file(host, container, conf_path,", "} with (local_path.parent / META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False)", "stream=f, default_flow_style=False) def generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0] p = (Path(nb_conf['base_url'])", "relative_to) return generate_edit_link(local_path) def create_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path", "quiet: logger.info(f'Copy {conf} {org}') shutil.copy2(conf, org) def make_metainfo(local_path, container, conf_path,", "f) return vars_path def init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes = ['/sys/fs/cgroup:/sys/fs/cgroup']", "params is None or len(params) == 0: return save_shibboleth_part(local_path) else:", "'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template} dest={conf_path.parent}/' env = dict([(x, os.environ[x]) for", "{} if 'shibboleth' in data['services']: params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth']) vars_path", "return save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host, conf_path, extra_vars,", "docker-compose.yml.') subprocess.run(args=args, check=True) def generate_proxy_conf(host, conf_path, extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template'", "conf_path, version=None): ret = Path(WORKDIR).absolute() / host if version is", "version=None): conf = get_local_path(host, container, conf_path, version) with conf.open() as", "create_conf_file(host, conf_path): dest = 
generate_local_path(host, conf_path) dest.parent.mkdir(parents=True, exist_ok=True) dest.touch() return", "(Path(MOODLE_DIR) / container / 'conf' / Path(conf_path).relative_to(relative_to)) def get_local_path(host, container,", "'fetch', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Downloading {remote_path} from {host_1}", "from IPython.core.display import HTML WORKDIR = 'edit' META_YML = '.vcp-meta.yml'", "{org}') shutil.copy2(conf, org) def make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE): params =", "as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def generate_edit_link(conf): nb_conf = list(notebookapp.list_running_servers())[0]", "relative_to=CONF_RELATIVE, version=None): conf = get_local_path(host, container, conf_path, version) return generate_edit_link(conf)", "container, conf_path, version) return link def apply_proxy_conf(host, version=None, restart=True): conf_path", "conf_path=None): if conf_path is None: conf_path = Path(remote_path).name dest =", "yaml.safe_load(f) params = {} if 'shibboleth' in data['services']: params['shibboleth_container'] =", "os from pathlib import Path import shutil import subprocess import", "version = find_latest_version(host, container, conf_path) return generate_local_path(host, conf_path, version) def", "logger.info(f'Copy {conf} {org}') shutil.copy2(conf, org) def make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE):", "ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /= version ret /= Path(conf_path).name", "generate_edit_link(local_path) def create_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = create_conf_file(host,", "from logging import basicConfig, getLogger, INFO import os from pathlib", "path_b): lines_a = [] lines_b = [] with path_a.open() as", "str(conf_path)) local_path = generate_local_path(host, conf_path, version) generate_proxy_conf(host, 
local_path, extra_vars) show_local_conf_diff(host,", "show_diff(_to_backup(local_path), local_path) return generate_edit_link(local_path) def append_shibboleth_container(host, moodle_url, volumes=[], extra_vars={}): hostname", "{container}' logger.info(f'Restart container {container}') subprocess.check_call(['ansible', host, '-a', cmd]) def fetch_conf(host,", "os.environ[x]) for x in ENV_INHERIT]) args = [ 'ansible', host,", "backup=yes' out = subprocess.check_output( ['ansible', host, '-m', 'copy', '-b', '-a',", "v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env, check=True) def update_proxy_conf(host,", "= subprocess.check_output( ['ansible', host, '-m', 'fetch', '-a', ansible_arg]) host_1 =", "f'src={remote_path} dest={dest} flat=yes' out = subprocess.check_output( ['ansible', host, '-m', 'fetch',", "dest def _to_backup(conf): return conf.parent / (conf.name + '.orig') def", "'/etc' ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__)", "(Path(nb_conf['base_url']) / 'edit' / conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') def", "to {host_1}') if not apply: return ansible_arg = f'chdir=/opt/moodle docker-compose", "str(conf_path.parent)) version = find_latest_version(host, container, str(conf_path)) local_path = generate_local_path(host, conf_path,", "container and params['container_path'] == conf_path) def _match_metainfo_by_remote_path(parent, remote_path): p =", "extra_vars.items(): args.extend(['-e', f'{k}={v}']) for x in extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args,", "find_latest_version(host, container, conf_path) return generate_local_path(host, conf_path, version) def _match_metainfo(parent, container,", "return ansible_arg = f'chdir=/opt/moodle docker-compose up -d --remove-orphans' args =", "upload_conf_file(backup_path, host, container, conf_path, relative_to) 
restart_container(host, container) local_path.rename(local_path.parent / (local_path.name", "def find_latest_version(host, container, conf_path): return get_versions(host, container, conf_path)[-1] def find_latest_version_by_remote_path(host,", "def fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = download_conf_file(host, container,", "'-a', cmd]) def fetch_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path =", "host, extra_vars, shibboleth_params={'hostname': hostname, 'volumes': volumes}, ) def upload_docker_compose(host, version=None,", "= 'template/docker/compose/docker-compose.yml' ansible_arg = f'src={template} dest={conf_path.parent}/' env = dict([(x, os.environ[x])", "subprocess.check_output( ['ansible', host, '-m', 'copy', '-b', '-a', ansible_arg]) host_1 =", "and params['container'] == container and params['container_path'] == conf_path) def _match_metainfo_by_remote_path(parent,", "container, conf_path, relative_to) restart_container(host, container) local_path.rename(local_path.parent / (local_path.name + '.revert'))", "relative_to) if restart: restart_container(host, container) def revert_conf(host, container, conf_path, relative_to=CONF_RELATIVE,", "subprocess.run(args=args, env=env, check=True) def update_proxy_conf(host, extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf') container", "= get_local_path(host, container, conf_path, version) backup_path = _to_backup(local_path) show_diff(local_path, backup_path)", "ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from {local_path} to {host_1}')", "local_path = download_conf_file(host, container, conf_path, relative_to) make_backup(local_path) make_metainfo(local_path, container, conf_path,", "= f'mkdir -p {dest.parent}' subprocess.run( ['ansible', host, '-a', ansible_arg]) ansible_arg", "revert_conf(host, container, conf_path, 
relative_to=CONF_RELATIVE, version=None): local_path = get_local_path(host, container, conf_path,", "dest = generate_remote_path(container, conf_path, relative_to) ansible_arg = f'mkdir -p {dest.parent}'", "== 0: return save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host,", "= dict([(x, os.environ[x]) for x in ENV_INHERIT]) args = ['ansible',", "and match(x, *args)]) def find_latest_version(host, container, conf_path): return get_versions(host, container,", "ENV_INHERIT = ['VAULT_ADDR', 'VAULT_TOKEN', 'PATH', 'REQUESTS_CA_BUNDLE'] logger = getLogger(__name__) basicConfig(level=INFO,", "extra_vars_file: args.extend(['-e', f'@{str(x)}']) subprocess.run(args=args, env=env, check=True) def update_docker_compose(host, extra_vars={}, shibboleth_params={}):", "x.is_dir() and match(x, *args)]) def find_latest_version(host, container, conf_path): return get_versions(host,", "import sys import yaml from urllib.parse import urlparse from notebook", "generate_proxy_conf(host, conf_path, extra_vars): template = 'template/docker/compose/moodle-proxy.conf.template' ansible_arg = f'src={template} dest={conf_path.parent}/moodle-proxy.conf'", "find_latest_version(host, container, conf_path): return get_versions(host, container, conf_path)[-1] def find_latest_version_by_remote_path(host, remote_path):", "None: ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /= version ret /=", "remote_path) def get_versions(host, *args, match=_match_metainfo): pdir = Path(WORKDIR).absolute() / host", "'docker-compose.yml') ansible_arg = f'dest={remote_path} src={local_path} backup=yes' out = subprocess.check_output( ['ansible',", "conf = get_local_path(host, container, conf_path, version) with conf.open() as f:", "isinstance(params, dict) and 'remote_path' in params and params['remote_path'] == remote_path)", "f'mkdir -p {dest.parent}' subprocess.run( ['ansible', host, '-a', ansible_arg]) ansible_arg =", "def 
update_docker_compose(host, extra_vars={}, shibboleth_params={}): remote_path = MOODLE_DIR + '/docker-compose.yml' local_path", "cmd = f'chdir={MOODLE_DIR} docker-compose restart {container}' logger.info(f'Restart container {container}') subprocess.check_call(['ansible',", "extra_vars={}): hostname = urlparse(moodle_url).netloc return update_docker_compose( host, extra_vars, shibboleth_params={'hostname': hostname,", "def create_conf(host, container, conf_path, relative_to=CONF_RELATIVE, create=False): local_path = create_conf_file(host, conf_path)", "as f: yaml.safe_dump(params, f) return vars_path def setup_shibboleth_part(local_path, **params): if", "k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) subprocess.run(args=args, env=env, check=True) def", "shibboleth_params={'hostname': hostname, 'volumes': volumes}, ) def upload_docker_compose(host, version=None, apply=False): remote_path", "yaml from urllib.parse import urlparse from notebook import notebookapp from", "diff = show_local_conf_diff(host, container, conf_path, version) local_path = get_local_path(host, container,", "container, conf_path, version) with conf.open() as f: print(f.read()) def edit_local_conf(host,", "HTML WORKDIR = 'edit' META_YML = '.vcp-meta.yml' MOODLE_DIR = '/opt/moodle'", "= generate_local_path(host, conf_path) ansible_arg = f'src={remote_path} dest={dest} flat=yes' out =", "yaml.safe_dump(params, f) return vars_path def init_shibboleth_part(conf_dir, hostname, volumes): shibboleth_volumes =", "/ META_YML if not p.exists(): return False with p.open() as", "with path_b.open() as f: lines_b = f.readlines() diff = list(unified_diff(", "ansible_arg]) ansible_arg = f'dest={dest} src={src} backup=yes' out = subprocess.check_output( ['ansible',", "ansible_arg] logger.info('Apply the changes in docker-compose.yml.') subprocess.run(args=args, check=True) def generate_proxy_conf(host,", "args = [ 'ansible', host, '-m', 'template', '-c', 'local', '-a',", "or len(params) == 0: return 
save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent, **params)", "/ version / 'docker-compose.yml') ansible_arg = f'dest={remote_path} src={local_path} backup=yes' out", "container, conf_path, relative_to=CONF_RELATIVE, version=None, restart=True): diff = show_local_conf_diff(host, container, conf_path,", "/ 'conf' / Path(conf_path).relative_to(relative_to)) def get_local_path(host, container, conf_path, version=None): if", "def apply_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None, restart=True): diff = show_local_conf_diff(host,", "= MOODLE_DIR + '/docker-compose.yml' local_path = download_file(host, remote_path) make_backup(local_path) make_simple_metainfo(local_path,", "dest={conf_path.parent}/moodle-proxy.conf' env = dict([(x, os.environ[x]) for x in ENV_INHERIT]) args", "= out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {remote_path} from {local_path} to {host_1}') if not", "ansible_arg] for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) for x", "f'{k}={v}']) subprocess.run(args=args, env=env, check=True) def update_proxy_conf(host, extra_vars={}): conf_path = Path('/usr/local/apache2/conf/moodle-proxy.conf')", "/ 'edit' / conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') def show_diff(path_a,", "= get_local_path(host, container, conf_path, version) show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path): with", "generate_local_path(host, conf_path, version) def _match_metainfo(parent, container, conf_path): p = parent", "= f.readlines() with path_b.open() as f: lines_b = f.readlines() diff", "None or len(params) == 0: return save_shibboleth_part(local_path) else: return init_shibboleth_part(local_path.parent,", "show_local_conf_diff(host, container, conf_path, version) return link def apply_proxy_conf(host, version=None, restart=True):", "apply: return ansible_arg = 
f'chdir=/opt/moodle docker-compose up -d --remove-orphans' args", "args = ['ansible', host, '-a', ansible_arg] logger.info('Apply the changes in", "shibboleth_volumes, 'container_name': 'shibboleth', 'hostname': hostname, }), } vars_path = conf_dir", "params and 'container_path' in params and params['container'] == container and", "params = yaml.safe_load(f) return ( isinstance(params, dict) and 'remote_path' in", "return conf.parent / (conf.name + '.orig') def make_backup(conf, quiet=False): org", "'.orig') def make_backup(conf, quiet=False): org = _to_backup(conf) if not quiet:", "ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from {src} to {host_1}')", "/ (local_path.name + '.revert')) def show_local_conf(host, container, conf_path, relative_to=CONF_RELATIVE, version=None):", "init_shibboleth_part(local_path.parent, **params) def generate_docker_compose(host, conf_path, extra_vars, extra_vars_file): template = 'template/docker/compose/docker-compose.yml'", "host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from {src} to {host_1}') def", "return ( isinstance(params, dict) and 'remote_path' in params and params['remote_path']", "make_simple_metainfo(local_path, remote_path): params = { 'remote_path': remote_path, 'version': list(local_path.parts)[-2], }", "= subprocess.check_output( ['ansible', host, '-m', 'copy', '-b', '-a', ansible_arg]) host_1", "'local', '-a', ansible_arg] for k, v in extra_vars.items(): args.extend(['-e', f'{k}={v}'])", "to {dest}') return dest def download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE): src", "{dest}') return dest def download_conf_file(host, container, conf_path, relative_to=CONF_RELATIVE): src =", "= show_local_conf_diff(host, container, conf_path, version) local_path = get_local_path(host, container, conf_path,", "conf_path, relative_to)), 'version': list(local_path.parts)[-2], } with (local_path.parent / 
META_YML).open(mode='w') as", "= conf_path.parent / 'extra_vars.yml' with vars_path.open(mode='w') as f: yaml.safe_dump(params, f)", "= urlparse(moodle_url).netloc return update_docker_compose( host, extra_vars, shibboleth_params={'hostname': hostname, 'volumes': volumes},", "'-b', '-a', ansible_arg]) host_1 = out.decode('utf-8').split(\"\\n\")[0].split()[0] logger.info(f'Uploading {dest} from {src}", "container, conf_path, relative_to=CONF_RELATIVE, version=None): local_path = get_local_path(host, container, conf_path, version)", "restart_container(host, container): cmd = f'chdir={MOODLE_DIR} docker-compose restart {container}' logger.info(f'Restart container", "conf_path, version) with conf.open() as f: print(f.read()) def edit_local_conf(host, container,", "is None: ret /= datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else: ret /= version ret", "'edit' / conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') def show_diff(path_a, path_b):", "create=False): local_path = create_conf_file(host, conf_path) make_backup(local_path, quiet=True) make_metainfo(local_path, container, conf_path,", "= 'edit' META_YML = '.vcp-meta.yml' MOODLE_DIR = '/opt/moodle' CONF_RELATIVE =", "host / version / 'docker-compose.yml') ansible_arg = f'dest={remote_path} src={local_path} backup=yes'", "upload_conf_file(local_path, host, container, conf_path, relative_to) if restart: restart_container(host, container) def", "{ 'shibboleth_container': yaml.safe_dump({ 'image': 'harbor.vcloud.nii.ac.jp/vcp/moodle:shibboleth-3.0.4', 'privileged': True, 'ports': ['443:443'], 'volumes':", "INFO import os from pathlib import Path import shutil import", "'version': list(local_path.parts)[-2], } with (local_path.parent / META_YML).open(mode='w') as f: yaml.safe_dump(params,", "v in extra_vars.items(): args.extend(['-e', f'{k}={v}']) for x in extra_vars_file: args.extend(['-e',", "/ host if version is None: ret /= 
datetime.now().strftime(\"%Y%m%d%H%M%S%f\") else:", "conf_path, version) backup_path = _to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path, host, container,", "params['shibboleth_container'] = yaml.safe_dump( data['services']['shibboleth']) vars_path = conf_path.parent / 'extra_vars.yml' with", "shibboleth_vars = setup_shibboleth_part(local_path, **shibboleth_params) generate_docker_compose(host, local_path, extra_vars, [shibboleth_vars]) show_diff(_to_backup(local_path), local_path)", "container, conf_path, version) backup_path = _to_backup(local_path) show_diff(local_path, backup_path) upload_conf_file(backup_path, host,", "vars_path.open(mode='w') as f: yaml.safe_dump(params, f) return vars_path def setup_shibboleth_part(local_path, **params):", "get_local_path(host, container, conf_path, version) upload_conf_file(local_path, host, container, conf_path, relative_to) if", "dest = generate_local_path(host, conf_path) ansible_arg = f'src={remote_path} dest={dest} flat=yes' out", "version=None): ret = Path(WORKDIR).absolute() / host if version is None:", "lines_a, lines_b, fromfile=path_a.name, tofile=path_b.name)) sys.stdout.writelines(diff) return len(diff) def upload_conf_file(src, host,", "conf_path, version) show_diff(_to_backup(local_path), local_path) def save_shibboleth_part(conf_path): with conf_path.open() as f:", "'remote_path': str(generate_remote_path(container, conf_path, relative_to)), 'version': list(local_path.parts)[-2], } with (local_path.parent /", "META_YML).open(mode='w') as f: yaml.safe_dump(params, stream=f, default_flow_style=False) def make_simple_metainfo(local_path, remote_path): params", "'-c', 'local', '-a', ansible_arg] for k, v in extra_vars.items(): args.extend(['-e',", "version = find_latest_version_by_remote_path(host, remote_path) local_path = ( Path(WORKDIR).absolute() / host", "conf.absolute().relative_to(nb_conf['notebook_dir'])) return HTML(f'<a href={p} target=\"_blank\">{p.name}</a>') def 
show_diff(path_a, path_b): lines_a =", "def upload_conf_file(src, host, container, conf_path, relative_to=CONF_RELATIVE): dest = generate_remote_path(container, conf_path,", "shutil.copy2(conf, org) def make_metainfo(local_path, container, conf_path, relative_to=CONF_RELATIVE): params = {", "download_conf_file(host, container, conf_path, relative_to) make_backup(local_path) make_metainfo(local_path, container, conf_path, relative_to) return", "container, conf_path, relative_to=CONF_RELATIVE): src = generate_remote_path(container, conf_path, relative_to) return download_file(host,", "= '.vcp-meta.yml' MOODLE_DIR = '/opt/moodle' CONF_RELATIVE = '/etc' ENV_INHERIT =" ]
[ "edit distance computes the cost it takes to get from", "len(source) m = len(target) D = {} # Initialization for", "j in range(0, m+1): D[0,j] = j for i in", "in range(1, m+1): if source[i-1] == target[j-1]: D[i,j] = D[i-1,", "i in range(0, n+1): D[i,0] = i for j in", "computes the cost it takes to get from one string", "for insertions or deletions and a cost of 2 for", "= D[i-1, j-1] else: D[i,j] = min( D[i-1, j] +", "else: D[i,j] = min( D[i-1, j] + 1, D[i, j-1]", "to another string. This implementation uses the Levenshtein distance with", "get from one string to another string. This implementation uses", "= len(target) D = {} # Initialization for i in", "for j in range(1, m+1): if source[i-1] == target[j-1]: D[i,j]", "deletions and a cost of 2 for substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance", "it takes to get from one string to another string.", "substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance For example, getting from \"intention\" to \"execution\"", "This implementation uses the Levenshtein distance with a cost of", "uses the Levenshtein distance with a cost of 1 for", "m+1): if source[i-1] == target[j-1]: D[i,j] = D[i-1, j-1] else:", "min( D[i-1, j] + 1, D[i, j-1] + 1, D[i-1,", "source[i-1] == target[j-1]: D[i,j] = D[i-1, j-1] else: D[i,j] =", "takes to get from one string to another string. This", "another string. This implementation uses the Levenshtein distance with a", "distance with a cost of 1 for insertions or deletions", "= len(source) m = len(target) D = {} # Initialization", "== target[j-1]: D[i,j] = D[i-1, j-1] else: D[i,j] = min(", "Initialization for i in range(0, n+1): D[i,0] = i for", "if source[i-1] == target[j-1]: D[i,j] = D[i-1, j-1] else: D[i,j]", "one string to another string. This implementation uses the Levenshtein", "Resource: https://en.wikipedia.org/wiki/Edit_distance For example, getting from \"intention\" to \"execution\" is", "2 for substitutions. 
Resource: https://en.wikipedia.org/wiki/Edit_distance For example, getting from \"intention\"", "len(target) D = {} # Initialization for i in range(0,", "# Initialization for i in range(0, n+1): D[i,0] = i", "D[i-1, j-1] else: D[i,j] = min( D[i-1, j] + 1,", "string. This implementation uses the Levenshtein distance with a cost", "is a cost of 8. minimum_edit_distance(\"intention\", \"execution\") # 8 \"\"\"", "a cost of 2 for substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance For example,", "minimum_edit_distance(source, target): n = len(source) m = len(target) D =", "1, D[i, j-1] + 1, D[i-1, j-1] + 2 )", "in range(0, m+1): D[0,j] = j for i in range(1,", "in range(0, n+1): D[i,0] = i for j in range(0,", "getting from \"intention\" to \"execution\" is a cost of 8.", "n = len(source) m = len(target) D = {} #", "target): n = len(source) m = len(target) D = {}", "and a cost of 2 for substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance For", "{} # Initialization for i in range(0, n+1): D[i,0] =", "\"execution\") # 8 \"\"\" def minimum_edit_distance(source, target): n = len(source)", "\"\"\" Minimum edit distance computes the cost it takes to", "for i in range(0, n+1): D[i,0] = i for j", "\"intention\" to \"execution\" is a cost of 8. minimum_edit_distance(\"intention\", \"execution\")", "def minimum_edit_distance(source, target): n = len(source) m = len(target) D", "of 1 for insertions or deletions and a cost of", "insertions or deletions and a cost of 2 for substitutions.", "minimum_edit_distance(\"intention\", \"execution\") # 8 \"\"\" def minimum_edit_distance(source, target): n =", "# 8 \"\"\" def minimum_edit_distance(source, target): n = len(source) m", "for j in range(0, m+1): D[0,j] = j for i", "target[j-1]: D[i,j] = D[i-1, j-1] else: D[i,j] = min( D[i-1,", "from one string to another string. This implementation uses the", "to \"execution\" is a cost of 8. 
minimum_edit_distance(\"intention\", \"execution\") #", "j for i in range(1, n+1): for j in range(1,", "to get from one string to another string. This implementation", "Minimum edit distance computes the cost it takes to get", "range(0, n+1): D[i,0] = i for j in range(0, m+1):", "implementation uses the Levenshtein distance with a cost of 1", "cost of 2 for substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance For example, getting", "from \"intention\" to \"execution\" is a cost of 8. minimum_edit_distance(\"intention\",", "the Levenshtein distance with a cost of 1 for insertions", "1 for insertions or deletions and a cost of 2", "j in range(1, m+1): if source[i-1] == target[j-1]: D[i,j] =", "D[0,j] = j for i in range(1, n+1): for j", "range(1, m+1): if source[i-1] == target[j-1]: D[i,j] = D[i-1, j-1]", "distance computes the cost it takes to get from one", "range(1, n+1): for j in range(1, m+1): if source[i-1] ==", "\"\"\" def minimum_edit_distance(source, target): n = len(source) m = len(target)", "D[i, j-1] + 1, D[i-1, j-1] + 2 ) return", "with a cost of 1 for insertions or deletions and", "a cost of 8. minimum_edit_distance(\"intention\", \"execution\") # 8 \"\"\" def", "D[i,j] = D[i-1, j-1] else: D[i,j] = min( D[i-1, j]", "a cost of 1 for insertions or deletions and a", "i in range(1, n+1): for j in range(1, m+1): if", "= min( D[i-1, j] + 1, D[i, j-1] + 1,", "cost of 1 for insertions or deletions and a cost", "cost it takes to get from one string to another", "https://en.wikipedia.org/wiki/Edit_distance For example, getting from \"intention\" to \"execution\" is a", "for substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance For example, getting from \"intention\" to", "string to another string. This implementation uses the Levenshtein distance", "cost of 8. 
minimum_edit_distance(\"intention\", \"execution\") # 8 \"\"\" def minimum_edit_distance(source,", "+ 1, D[i, j-1] + 1, D[i-1, j-1] + 2", "Levenshtein distance with a cost of 1 for insertions or", "= {} # Initialization for i in range(0, n+1): D[i,0]", "the cost it takes to get from one string to", "example, getting from \"intention\" to \"execution\" is a cost of", "D = {} # Initialization for i in range(0, n+1):", "range(0, m+1): D[0,j] = j for i in range(1, n+1):", "n+1): for j in range(1, m+1): if source[i-1] == target[j-1]:", "of 2 for substitutions. Resource: https://en.wikipedia.org/wiki/Edit_distance For example, getting from", "D[i,j] = min( D[i-1, j] + 1, D[i, j-1] +", "8 \"\"\" def minimum_edit_distance(source, target): n = len(source) m =", "of 8. minimum_edit_distance(\"intention\", \"execution\") # 8 \"\"\" def minimum_edit_distance(source, target):", "i for j in range(0, m+1): D[0,j] = j for", "8. minimum_edit_distance(\"intention\", \"execution\") # 8 \"\"\" def minimum_edit_distance(source, target): n", "D[i,0] = i for j in range(0, m+1): D[0,j] =", "j-1] else: D[i,j] = min( D[i-1, j] + 1, D[i,", "= j for i in range(1, n+1): for j in", "D[i-1, j] + 1, D[i, j-1] + 1, D[i-1, j-1]", "n+1): D[i,0] = i for j in range(0, m+1): D[0,j]", "or deletions and a cost of 2 for substitutions. Resource:", "+ 1, D[i-1, j-1] + 2 ) return D[n-1, m-1]", "\"execution\" is a cost of 8. minimum_edit_distance(\"intention\", \"execution\") # 8", "in range(1, n+1): for j in range(1, m+1): if source[i-1]", "for i in range(1, n+1): for j in range(1, m+1):", "j] + 1, D[i, j-1] + 1, D[i-1, j-1] +", "m = len(target) D = {} # Initialization for i", "j-1] + 1, D[i-1, j-1] + 2 ) return D[n-1,", "= i for j in range(0, m+1): D[0,j] = j", "m+1): D[0,j] = j for i in range(1, n+1): for", "For example, getting from \"intention\" to \"execution\" is a cost" ]
[ "outfile, threads=1, maxmatch=True): delta = f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm", ": new_record.POS + len(new_record.REF) ] ) if new_record.CHROM not in", "new_record.POS + len(new_record.REF) ] ) if new_record.CHROM not in vcf_records:", "os.unlink(delta_1) def _run_dnadiff( ref_fasta, query_fasta, outfile, split_query=False, debug=False, threads=1, maxmatch=True,", "addition to making the snps file. def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile,", "this is a deletion. So need to # get the", "well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start", "\".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ] ) ) elif variant.var_type ==", "logging.info(\"Finish run command: \" + command) os.unlink(delta) os.unlink(delta_1) def _run_dnadiff(", "variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ] ) ) elif", "maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile, \"wb\") as f_out: for", "the VCF has the correct REF and ALT sequences if", "def make_truth_vcf( ref_fasta, truth_fasta, outfile, debug=False, split_ref=False, threads=1, maxmatch=True, ):", "import shutil import subprocess import pyfastaq import pymummer from cluster_vcf_records", "--threads {threads} {maxmatch_opt} --delta {delta} {ref_fasta} {query_fasta}\", f\"delta-filter -1 {delta}", "mummer. 
Need to do the appropriate # reverse (complement) fixes", "elif variant.var_type == pymummer.variant.DEL: # The query has sequence missing,", "from operator import attrgetter import logging import os import shutil", "outfile, threads=threads, maxmatch=maxmatch ) else: tmp_snp_files = [] seq_reader =", "a new VCF file unmerged records.\"\"\" vcf_records = {} variants", "raise Exception(\"Unknown variant type: \" + str(variant)) assert ( new_record.REF", "qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base", "delta = f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta} {delta_1}\",", "We're making VCF records w.r.t. the # query, so this", "for vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\") as f:", "print(\"##fileformat=VCFv4.2\", file=f) for seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f)", "def _snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads the .snps file made by", "\"1/1\", ] ) ) else: raise Exception(\"Unknown variant type: \"", "[ variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start - 1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start", "VCF file unmerged records.\"\"\" vcf_records = {} variants = pymummer.snp_file.get_all_variants(snps_file)", "vcf_records = {} variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta) for", "snps_file, split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file, ref_fasta, outfile) if", "import os import shutil import subprocess import pyfastaq import pymummer", "vcf_record from varifier import utils # We only want the", 
"if variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq))", "the deletion as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name,", "vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record) for vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with", "open(outfile, \"wb\") as f_out: for snp_file in tmp_snp_files: with open(snp_file,", "The query has sequence missing, compared to the # reference.", "f\"nucmer --threads {threads} {maxmatch_opt} --delta {delta} {ref_fasta} {query_fasta}\", f\"delta-filter -1", "f: print(\"##fileformat=VCFv4.2\", file=f) for seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\",", "ref_fasta, query_fasta, outfile, threads=threads, maxmatch=maxmatch ) else: tmp_snp_files = []", "query_seqs[new_record.CHROM][ new_record.POS : new_record.POS + len(new_record.REF) ] ) if new_record.CHROM", "> {delta_1}\", f\"show-snps -rlTHC {delta_1} > {outfile}\", ] for command", "of query sequences. 
Writes a new VCF file unmerged records.\"\"\"", "variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base =", "+ str(variant)) assert ( new_record.REF == query_seqs[new_record.CHROM][ new_record.POS : new_record.POS", "vcf_list: print(record, file=f) def make_truth_vcf( ref_fasta, truth_fasta, outfile, debug=False, split_ref=False,", "str(variant.qry_start + 1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\", \".\",", "\" + command) os.unlink(delta) os.unlink(delta_1) def _run_dnadiff( ref_fasta, query_fasta, outfile,", "f\"delta-filter -1 {delta} > {delta_1}\", f\"show-snps -rlTHC {delta_1} > {outfile}\",", "{delta} > {delta_1}\", f\"show-snps -rlTHC {delta_1} > {outfile}\", ] for", "threads=1, maxmatch=True): delta = f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f", "not debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads the .snps", "had to be # reverse complemented when aligned by mummer.", "= pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta) for variant in variants: #", "delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta} {delta_1}\", shell=True) maxmatch_opt =", "[] seq_reader = pyfastaq.sequences.file_reader(query_fasta) for seq in seq_reader: prefix =", "need to # get the nucleotide before the insertion as", "prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\" with open(tmp_fasta, \"w\") as", "\"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\", variant.qry_base, variant.ref_base, \".\",", "has the correct REF and ALT sequences if variant.reverse: qry_seq", "snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta, snps_file, split_query=split_ref, debug=debug, 
threads=threads,", "file unmerged records.\"\"\" vcf_records = {} variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs", "variants: # If the variant is reversed, it means that", "snp_file, threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile, \"wb\") as", ") ) elif variant.var_type == pymummer.variant.DEL: # The query has", "for command in commands: logging.info(\"Start run command: \" + command)", "str(variant.qry_start + 1), \".\", variant.qry_base, variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\",", "str(variant)) assert ( new_record.REF == query_seqs[new_record.CHROM][ new_record.POS : new_record.POS +", "VCF records w.r.t. the # query, so this is a", "made by dnadiff. query_fasta = fasta file of query sequences.", "runs several other commands # in addition to making the", "command) subprocess.check_output(command, shell=True) logging.info(\"Finish run command: \" + command) os.unlink(delta)", "query, so this is an insertion. So need to #", "variant.qry_name, str(variant.qry_start + 1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\",", "truth_fasta, outfile, debug=False, split_ref=False, threads=1, maxmatch=True, ): snps_file = f\"{outfile}.tmp.snps\"", "missing, compared to the # reference. We're making VCF records", "If the variant is reversed, it means that either the", "[ variant.qry_name, str(variant.qry_start + 1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base,", "cluster_vcf_records import vcf_record from varifier import utils # We only", "record in vcf_list: print(record, file=f) def make_truth_vcf( ref_fasta, truth_fasta, outfile,", "-1 out.delta > out.1delta # show-snps -rlTHC out.1delta > out.snps", "w.r.t. the # query, so this is an insertion. 
So", "threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile, \"wb\") as f_out:", "from MUMmer. From reading # the docs inspecting that script,", "if maxmatch else \"\" commands = [ f\"nucmer --threads {threads}", "run command: \" + command) subprocess.check_output(command, shell=True) logging.info(\"Finish run command:", "variant.qry_base, variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ] ) )", "so the VCF has the correct REF and ALT sequences", "# delta-filter -1 out.delta > out.1delta # show-snps -rlTHC out.1delta", "in commands: logging.info(\"Start run command: \" + command) subprocess.check_output(command, shell=True)", "= vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\", query_seqs[variant.qry_name][variant.qry_start],", "file. def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1, maxmatch=True): delta = f\"{outfile}.tmp.delta\"", "tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile, \"wb\")", "file of query sequences. Writes a new VCF file unmerged", "query has sequence missing, compared to the # reference. We're", "means that either the ref or query had to be", "deletion. So need to # get the nucleotide before the", "reverse complemented when aligned by mummer. Need to do the", "query sequences. 
Writes a new VCF file unmerged records.\"\"\" vcf_records", "open(tmp_fasta, \"w\") as f: print(seq, file=f) snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split(", "1), \".\", variant.qry_base, variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ]", "): if not split_query: _run_dnadiff_one_split( ref_fasta, query_fasta, outfile, threads=threads, maxmatch=maxmatch", "type: \" + str(variant)) assert ( new_record.REF == query_seqs[new_record.CHROM][ new_record.POS", ") ) elif variant.var_type == pymummer.variant.INS: # The ref has", "compared to the # query. We're making VCF records w.r.t.", "--delta {delta} {ref_fasta} {query_fasta}\", f\"delta-filter -1 {delta} > {delta_1}\", f\"show-snps", "to # get the nucleotide before the deletion as well.", "sequence missing, compared to the # reference. We're making VCF", "= f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta, snps_file, split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch,", "shell=True) logging.info(\"Finish run command: \" + command) os.unlink(delta) os.unlink(delta_1) def", "the # query. We're making VCF records w.r.t. the #", "split_query: _run_dnadiff_one_split( ref_fasta, query_fasta, outfile, threads=threads, maxmatch=maxmatch ) else: tmp_snp_files", "subprocess import pyfastaq import pymummer from cluster_vcf_records import vcf_record from", "that script, we need to run these commands: # #", "# The ref has sequence missing, compared to the #", "debug=False, threads=1, maxmatch=True, ): if not split_query: _run_dnadiff_one_split( ref_fasta, query_fasta,", "insertion as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start", "attrgetter import logging import os import shutil import subprocess import", "to the # reference. We're making VCF records w.r.t. the", "has sequence missing, compared to the # reference. 
We're making", "\"w\") as f: print(\"##fileformat=VCFv4.2\", file=f) for seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\",", "qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq =", "insertion. So need to # get the nucleotide before the", "\"w\") as f: print(seq, file=f) snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta,", "\"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ] ) ) elif variant.var_type == pymummer.variant.INS:", "\" + str(variant)) assert ( new_record.REF == query_seqs[new_record.CHROM][ new_record.POS :", "not split_query: _run_dnadiff_one_split( ref_fasta, query_fasta, outfile, threads=threads, maxmatch=maxmatch ) else:", "for seq in seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\"", ") elif variant.var_type == pymummer.variant.DEL: # The query has sequence", "ref_fasta, truth_fasta, outfile, debug=False, split_ref=False, threads=1, maxmatch=True, ): snps_file =", "vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\") as f: print(\"##fileformat=VCFv4.2\", file=f) for seq", "instead of just running show-snps, which runs several other commands", "if not split_query: _run_dnadiff_one_split( ref_fasta, query_fasta, outfile, threads=threads, maxmatch=maxmatch )", "os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile, \"wb\") as f_out: for snp_file in", "= [ f\"nucmer --threads {threads} {maxmatch_opt} --delta {delta} {ref_fasta} {query_fasta}\",", "variant.var_type == pymummer.variant.SNP: new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start", "== pymummer.variant.SNP: new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start +", "variant.qry_name, str(variant.qry_start + 1), \".\", variant.qry_base, variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\",", "making VCF records w.r.t. 
the # query, so this is", "_run_dnadiff( truth_fasta, ref_fasta, snps_file, split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file,", "key, vcf_list in sorted(vcf_records.items()): for record in vcf_list: print(record, file=f)", "file=f) def make_truth_vcf( ref_fasta, truth_fasta, outfile, debug=False, split_ref=False, threads=1, maxmatch=True,", "is a deletion. So need to # get the nucleotide", "+ len(new_record.REF) ] ) if new_record.CHROM not in vcf_records: vcf_records[new_record.CHROM]", "or query had to be # reverse complemented when aligned", "{} variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta) for variant in", "# get the nucleotide before the deletion as well. new_record", "{delta_1} > {outfile}\", ] for command in commands: logging.info(\"Start run", "is reversed, it means that either the ref or query", "we need to run these commands: # # nucmer --maxmatch", "+ 1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\",", "f: print(seq, file=f) snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file,", "debug=False, split_ref=False, threads=1, maxmatch=True, ): snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta,", "debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads the .snps file", "to do the appropriate # reverse (complement) fixes so the", "get the nucleotide before the deletion as well. new_record =", ".snps file made by dnadiff. query_fasta = fasta file of", "new_record.CHROM not in vcf_records: vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record) for vcf_list", "by dnadiff. query_fasta = fasta file of query sequences. Writes", "sequences. 
Writes a new VCF file unmerged records.\"\"\" vcf_records =", "pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base = ref_seq.seq if variant.var_type == pymummer.variant.SNP:", "commands # in addition to making the snps file. def", "\".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ] ) ) elif variant.var_type", "vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\") as f: print(\"##fileformat=VCFv4.2\",", "with open(outfile, \"w\") as f: print(\"##fileformat=VCFv4.2\", file=f) for seq in", ") else: raise Exception(\"Unknown variant type: \" + str(variant)) assert", "len(new_record.REF) ] ) if new_record.CHROM not in vcf_records: vcf_records[new_record.CHROM] =", "variant.qry_base) qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp()", "to be # reverse complemented when aligned by mummer. Need", "1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\",", "as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start +", "# query. We're making VCF records w.r.t. the # query,", "the docs inspecting that script, we need to run these", "out.snps # # This is instead of just running show-snps,", "query_seqs[variant.qry_name][variant.qry_start - 1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1], \".\", \".\",", "tmp_snp_files = [] seq_reader = pyfastaq.sequences.file_reader(query_fasta) for seq in seq_reader:", "from the dnadiff script from MUMmer. 
From reading # the", "1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\",", "> out.1delta # show-snps -rlTHC out.1delta > out.snps # #", "--maxmatch --delta out.delta ref.fasta query.fasta # delta-filter -1 out.delta >", "os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads the .snps file made", "seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\" with open(tmp_fasta, \"w\")", "variant in variants: # If the variant is reversed, it", "pymummer.variant.INS: # The ref has sequence missing, compared to the", "f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta} {delta_1}\", shell=True) maxmatch_opt = \"--maxmatch\" if", "ref_fasta, tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile,", "debug=debug, threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file, ref_fasta, outfile) if not debug:", "file made by dnadiff. 
query_fasta = fasta file of query", "f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\" with open(tmp_fasta, \"w\") as f: print(seq,", "= f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta} {delta_1}\", shell=True) maxmatch_opt = \"--maxmatch\"", "query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key, vcf_list in sorted(vcf_records.items()):", "os.unlink(delta) os.unlink(delta_1) def _run_dnadiff( ref_fasta, query_fasta, outfile, split_query=False, debug=False, threads=1,", "in sorted(vcf_records.items()): for record in vcf_list: print(record, file=f) def make_truth_vcf(", "truth_fasta, ref_fasta, snps_file, split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file, ref_fasta,", "{delta} {ref_fasta} {query_fasta}\", f\"delta-filter -1 {delta} > {delta_1}\", f\"show-snps -rlTHC", "import pymummer from cluster_vcf_records import vcf_record from varifier import utils", "fixes so the VCF has the correct REF and ALT", "an insertion. So need to # get the nucleotide before", "open(snp_file, \"rb\") as f_in: shutil.copyfileobj(f_in, f_out) if not debug: os.unlink(snp_file)", "new_record.REF == query_seqs[new_record.CHROM][ new_record.POS : new_record.POS + len(new_record.REF) ] )", "\".\", query_seqs[variant.qry_name][variant.qry_start - 1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1], \".\",", "by mummer. Need to do the appropriate # reverse (complement)", "vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start]", "so this is an insertion. 
So need to # get", "file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key, vcf_list in sorted(vcf_records.items()): for record", "if variant.var_type == pymummer.variant.SNP: new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name,", "sorted(vcf_records.items()): for record in vcf_list: print(record, file=f) def make_truth_vcf( ref_fasta,", ") ) else: raise Exception(\"Unknown variant type: \" + str(variant))", "\"\" commands = [ f\"nucmer --threads {threads} {maxmatch_opt} --delta {delta}", "file=f) snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch", "f_out: for snp_file in tmp_snp_files: with open(snp_file, \"rb\") as f_in:", "-1 {delta} > {delta_1}\", f\"show-snps -rlTHC {delta_1} > {outfile}\", ]", "\".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\",", "for snp_file in tmp_snp_files: with open(snp_file, \"rb\") as f_in: shutil.copyfileobj(f_in,", "run these commands: # # nucmer --maxmatch --delta out.delta ref.fasta", "# the docs inspecting that script, we need to run", "# get the nucleotide before the insertion as well. 
new_record", "variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ] ) ) elif", "{outfile}\", ] for command in commands: logging.info(\"Start run command: \"", "# If the variant is reversed, it means that either", "= vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start - 1]", "= pyfastaq.sequences.file_reader(query_fasta) for seq in seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta", "== query_seqs[new_record.CHROM][ new_record.POS : new_record.POS + len(new_record.REF) ] ) if", "tmp_snp_files.append(snp_file) with open(outfile, \"wb\") as f_out: for snp_file in tmp_snp_files:", "+ variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ] ) )", "This is instead of just running show-snps, which runs several", "maxmatch=True): delta = f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta}", "the # reference. We're making VCF records w.r.t. the #", "command: \" + command) subprocess.check_output(command, shell=True) logging.info(\"Finish run command: \"", "new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\",", "ref_seq.seq if variant.var_type == pymummer.variant.SNP: new_record = vcf_record.VcfRecord( \"\\t\".join( [", "not in vcf_records: vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record) for vcf_list in", "command in commands: logging.info(\"Start run command: \" + command) subprocess.check_output(command,", "missing, compared to the # query. 
We're making VCF records", "utils # We only want the .snps file from the", "pymummer.variant.SNP: new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1),", "import utils # We only want the .snps file from", "ALT sequences if variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base", "split_ref=False, threads=1, maxmatch=True, ): snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta,", "threads=1, maxmatch=True, ): snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta, snps_file,", "before the deletion as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [", "is an insertion. So need to # get the nucleotide", "snps file. def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1, maxmatch=True): delta =", "import subprocess import pyfastaq import pymummer from cluster_vcf_records import vcf_record", "\"--maxmatch\" if maxmatch else \"\" commands = [ f\"nucmer --threads", "Writes a new VCF file unmerged records.\"\"\" vcf_records = {}", "well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1),", "to the # query. We're making VCF records w.r.t. the", "dnadiff script from MUMmer. From reading # the docs inspecting", "show-snps -rlTHC out.1delta > out.snps # # This is instead", "records w.r.t. the # query, so this is a deletion.", ") else: tmp_snp_files = [] seq_reader = pyfastaq.sequences.file_reader(query_fasta) for seq", "f\"show-snps -rlTHC {delta_1} > {outfile}\", ] for command in commands:", "= pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\",", "before the insertion as well. 
new_record = vcf_record.VcfRecord( \"\\t\".join( [", "# nucmer --maxmatch --delta out.delta ref.fasta query.fasta # delta-filter -1", "threads=threads, maxmatch=maxmatch ) else: tmp_snp_files = [] seq_reader = pyfastaq.sequences.file_reader(query_fasta)", "which runs several other commands # in addition to making", "vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\", variant.qry_base, variant.ref_base,", "several other commands # in addition to making the snps", "_run_dnadiff( ref_fasta, query_fasta, outfile, split_query=False, debug=False, threads=1, maxmatch=True, ): if", "tmp_snp_files: with open(snp_file, \"rb\") as f_in: shutil.copyfileobj(f_in, f_out) if not", "print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key, vcf_list in sorted(vcf_records.items()): for record in", "these commands: # # nucmer --maxmatch --delta out.delta ref.fasta query.fasta", "snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch )", "[ variant.qry_name, str(variant.qry_start + 1), \".\", variant.qry_base, variant.ref_base, \".\", \".\",", "f_in: shutil.copyfileobj(f_in, f_out) if not debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta,", "] ) if new_record.CHROM not in vcf_records: vcf_records[new_record.CHROM] = []", "script from MUMmer. 
From reading # the docs inspecting that", "+ variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\",", "the variant is reversed, it means that either the ref", "print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key, vcf_list in sorted(vcf_records.items()): for", "in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\") as f: print(\"##fileformat=VCFv4.2\", file=f)", "pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta) for variant in variants: # If", "VCF has the correct REF and ALT sequences if variant.reverse:", "reversed, it means that either the ref or query had", "else \"\" commands = [ f\"nucmer --threads {threads} {maxmatch_opt} --delta", "the nucleotide before the insertion as well. new_record = vcf_record.VcfRecord(", "variant is reversed, it means that either the ref or", "do the appropriate # reverse (complement) fixes so the VCF", "variant.ref_base = ref_seq.seq if variant.var_type == pymummer.variant.SNP: new_record = vcf_record.VcfRecord(", "to run these commands: # # nucmer --maxmatch --delta out.delta", "tmp_fasta = f\"{prefix}.fasta\" with open(tmp_fasta, \"w\") as f: print(seq, file=f)", "{delta_1}\", shell=True) maxmatch_opt = \"--maxmatch\" if maxmatch else \"\" commands", "unmerged records.\"\"\" vcf_records = {} variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs =", "-rlTHC {delta_1} > {outfile}\", ] for command in commands: logging.info(\"Start", "Exception(\"Unknown variant type: \" + str(variant)) assert ( new_record.REF ==", "\"\\t\".join( [ variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start - 1] + variant.qry_base,", "ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base = ref_seq.seq if 
variant.var_type", "_run_dnadiff_one_split( ref_fasta, query_fasta, outfile, threads=threads, maxmatch=maxmatch ) else: tmp_snp_files =", "file from the dnadiff script from MUMmer. From reading #", "\"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ] ) ) else: raise Exception(\"Unknown variant", "dnadiff. query_fasta = fasta file of query sequences. Writes a", "in vcf_records: vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record) for vcf_list in vcf_records.values():", "shell=True) maxmatch_opt = \"--maxmatch\" if maxmatch else \"\" commands =", "with open(tmp_fasta, \"w\") as f: print(seq, file=f) snp_file = f\"{prefix}.snps\"", "str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start - 1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1],", "variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta) for variant in variants:", "that either the ref or query had to be #", "for seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key,", "variant.ref_base) ref_seq.revcomp() variant.ref_base = ref_seq.seq if variant.var_type == pymummer.variant.SNP: new_record", "- 1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ] ) )", "--delta out.delta ref.fasta query.fasta # delta-filter -1 out.delta > out.1delta", "] for command in commands: logging.info(\"Start run command: \" +", "import logging import os import shutil import subprocess import pyfastaq", "_snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads the .snps file made by dnadiff.", "the insertion as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name,", "MUMmer. 
From reading # the docs inspecting that script, we", "= f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta} {delta_1}\", shell=True)", "import attrgetter import logging import os import shutil import subprocess", "when aligned by mummer. Need to do the appropriate #", "pymummer from cluster_vcf_records import vcf_record from varifier import utils #", "REF and ALT sequences if variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base)", "delta-filter -1 out.delta > out.1delta # show-snps -rlTHC out.1delta >", "pyfastaq import pymummer from cluster_vcf_records import vcf_record from varifier import", "variant type: \" + str(variant)) assert ( new_record.REF == query_seqs[new_record.CHROM][", "sequences if variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base =", "\".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ] ) ) else: raise", ") if new_record.CHROM not in vcf_records: vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record)", "pyfastaq.sequences.file_reader(query_fasta) for seq in seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta =", "# reverse complemented when aligned by mummer. Need to do", "# reverse (complement) fixes so the VCF has the correct", ".snps file from the dnadiff script from MUMmer. From reading", "> out.snps # # This is instead of just running", ") os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with open(outfile, \"wb\") as f_out: for snp_file", "= vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\", variant.qry_base,", "if new_record.CHROM not in vcf_records: vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record) for", "the # query, so this is an insertion. 
So need", "to # get the nucleotide before the insertion as well.", "# # nucmer --maxmatch --delta out.delta ref.fasta query.fasta # delta-filter", "def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1, maxmatch=True): delta = f\"{outfile}.tmp.delta\" delta_1", "= f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta)", "need to run these commands: # # nucmer --maxmatch --delta", "\"\\t\".join( [ variant.qry_name, str(variant.qry_start + 1), \".\", query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] +", "vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\") as f: print(\"##fileformat=VCFv4.2\", file=f) for", "\" + command) subprocess.check_output(command, shell=True) logging.info(\"Finish run command: \" +", "new VCF file unmerged records.\"\"\" vcf_records = {} variants =", "variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ]", "query. We're making VCF records w.r.t. the # query, so", "# query, so this is a deletion. So need to", "\"\"\"Loads the .snps file made by dnadiff. query_fasta = fasta", "\".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ] ) ) else: raise Exception(\"Unknown", "query_seqs = utils.file_to_dict_of_seqs(query_fasta) for variant in variants: # If the", "\"1/1\", ] ) ) elif variant.var_type == pymummer.variant.INS: # The", "outfile): \"\"\"Loads the .snps file made by dnadiff. query_fasta =", "be # reverse complemented when aligned by mummer. Need to", "appropriate # reverse (complement) fixes so the VCF has the", "_run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file) with", "# in addition to making the snps file. 
def _run_dnadiff_one_split(ref_fasta,", "command) os.unlink(delta) os.unlink(delta_1) def _run_dnadiff( ref_fasta, query_fasta, outfile, split_query=False, debug=False,", "] ) ) elif variant.var_type == pymummer.variant.INS: # The ref", "nucmer --maxmatch --delta out.delta ref.fasta query.fasta # delta-filter -1 out.delta", "making the snps file. def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1, maxmatch=True):", "only want the .snps file from the dnadiff script from", "fasta file of query sequences. Writes a new VCF file", "1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ] ) ) else:", "reference. We're making VCF records w.r.t. the # query, so", "the snps file. def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1, maxmatch=True): delta", "query_fasta, outfile, threads=1, maxmatch=True): delta = f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\"", "os import shutil import subprocess import pyfastaq import pymummer from", "vcf_list in sorted(vcf_records.items()): for record in vcf_list: print(record, file=f) def", "a deletion. 
So need to # get the nucleotide before", "shutil.copyfileobj(f_in, f_out) if not debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta, outfile):", "for variant in variants: # If the variant is reversed,", "snp_file in tmp_snp_files: with open(snp_file, \"rb\") as f_in: shutil.copyfileobj(f_in, f_out)", "inspecting that script, we need to run these commands: #", "the appropriate # reverse (complement) fixes so the VCF has", "import vcf_record from varifier import utils # We only want", "\"GT\", \"1/1\", ] ) ) elif variant.var_type == pymummer.variant.INS: #", "= ref_seq.seq if variant.var_type == pymummer.variant.SNP: new_record = vcf_record.VcfRecord( \"\\t\".join(", "= {} variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta) for variant", "new_record.POS : new_record.POS + len(new_record.REF) ] ) if new_record.CHROM not", "ref_seq.revcomp() variant.ref_base = ref_seq.seq if variant.var_type == pymummer.variant.SNP: new_record =", "variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start - 1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start -", "( new_record.REF == query_seqs[new_record.CHROM][ new_record.POS : new_record.POS + len(new_record.REF) ]", "logging.info(\"Start run command: \" + command) subprocess.check_output(command, shell=True) logging.info(\"Finish run", "in variants: # If the variant is reversed, it means", "elif variant.var_type == pymummer.variant.INS: # The ref has sequence missing,", "threads=1, maxmatch=True, ): if not split_query: _run_dnadiff_one_split( ref_fasta, query_fasta, outfile,", "query_fasta = fasta file of query sequences. 
Writes a new", "+ command) subprocess.check_output(command, shell=True) logging.info(\"Finish run command: \" + command)", "query had to be # reverse complemented when aligned by", "shutil import subprocess import pyfastaq import pymummer from cluster_vcf_records import", "vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start - 1] +", "need to # get the nucleotide before the deletion as", "varifier import utils # We only want the .snps file", "= pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base = ref_seq.seq if variant.var_type ==", "{ref_fasta} {query_fasta}\", f\"delta-filter -1 {delta} > {delta_1}\", f\"show-snps -rlTHC {delta_1}", "out.delta > out.1delta # show-snps -rlTHC out.1delta > out.snps #", "vcf_records: vcf_records[new_record.CHROM] = [] vcf_records[new_record.CHROM].append(new_record) for vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\"))", "is instead of just running show-snps, which runs several other", "= [] vcf_records[new_record.CHROM].append(new_record) for vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile,", "- 1] + variant.qry_base, query_seqs[variant.qry_name][variant.qry_start - 1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\",", "has sequence missing, compared to the # query. We're making", "sequence missing, compared to the # query. We're making VCF", "if not debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads the", "the # query, so this is a deletion. 
So need", "{delta} {delta_1}\", shell=True) maxmatch_opt = \"--maxmatch\" if maxmatch else \"\"", "Need to do the appropriate # reverse (complement) fixes so", "new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start), \".\", query_seqs[variant.qry_name][variant.qry_start -", "> {outfile}\", ] for command in commands: logging.info(\"Start run command:", "query_seqs[variant.qry_name][variant.qry_start], query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ]", "] ) ) else: raise Exception(\"Unknown variant type: \" +", "in vcf_list: print(record, file=f) def make_truth_vcf( ref_fasta, truth_fasta, outfile, debug=False,", "records w.r.t. the # query, so this is an insertion.", "in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key, vcf_list in", "reading # the docs inspecting that script, we need to", "for record in vcf_list: print(record, file=f) def make_truth_vcf( ref_fasta, truth_fasta,", "_run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1, maxmatch=True): delta = f\"{outfile}.tmp.delta\" delta_1 =", "+ command) os.unlink(delta) os.unlink(delta_1) def _run_dnadiff( ref_fasta, query_fasta, outfile, split_query=False,", "outfile, split_query=False, debug=False, threads=1, maxmatch=True, ): if not split_query: _run_dnadiff_one_split(", "complemented when aligned by mummer. 
Need to do the appropriate", "threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file, ref_fasta, outfile) if not debug: os.unlink(snps_file)", "variant.var_type == pymummer.variant.INS: # The ref has sequence missing, compared", "From reading # the docs inspecting that script, we need", "docs inspecting that script, we need to run these commands:", "= \"--maxmatch\" if maxmatch else \"\" commands = [ f\"nucmer", "assert ( new_record.REF == query_seqs[new_record.CHROM][ new_record.POS : new_record.POS + len(new_record.REF)", "the nucleotide before the deletion as well. new_record = vcf_record.VcfRecord(", "pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base)", "ref_fasta, snps_file, split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file, ref_fasta, outfile)", "command: \" + command) os.unlink(delta) os.unlink(delta_1) def _run_dnadiff( ref_fasta, query_fasta,", "= fasta file of query sequences. Writes a new VCF", "query_fasta, outfile): \"\"\"Loads the .snps file made by dnadiff. query_fasta", "\"GT\", \"1/1\", ] ) ) else: raise Exception(\"Unknown variant type:", "vcf_records[new_record.CHROM].append(new_record) for vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\") as", "file=f) for key, vcf_list in sorted(vcf_records.items()): for record in vcf_list:", "operator import attrgetter import logging import os import shutil import", "else: raise Exception(\"Unknown variant type: \" + str(variant)) assert (", "query_fasta, outfile, split_query=False, debug=False, threads=1, maxmatch=True, ): if not split_query:", "run command: \" + command) os.unlink(delta) os.unlink(delta_1) def _run_dnadiff( ref_fasta,", "VCF records w.r.t. the # query, so this is an", "# query, so this is an insertion. 
So need to", "] ) ) elif variant.var_type == pymummer.variant.DEL: # The query", "maxmatch=True, ): if not split_query: _run_dnadiff_one_split( ref_fasta, query_fasta, outfile, threads=threads,", "query_fasta, outfile, threads=threads, maxmatch=maxmatch ) else: tmp_snp_files = [] seq_reader", "the ref or query had to be # reverse complemented", "query_seqs[variant.qry_name][variant.qry_start] + variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ] )", "So need to # get the nucleotide before the deletion", "\"rb\") as f_in: shutil.copyfileobj(f_in, f_out) if not debug: os.unlink(snp_file) def", "== pymummer.variant.DEL: # The query has sequence missing, compared to", "as f: print(\"##fileformat=VCFv4.2\", file=f) for seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f)", "{delta_1}\", f\"show-snps -rlTHC {delta_1} > {outfile}\", ] for command in", "commands: logging.info(\"Start run command: \" + command) subprocess.check_output(command, shell=True) logging.info(\"Finish", "f_out) if not debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file, query_fasta, outfile): \"\"\"Loads", "utils.file_to_dict_of_seqs(query_fasta) for variant in variants: # If the variant is", "variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp() variant.qry_base = \"\".join(reversed(qry_seq.seq)) ref_seq", "else: tmp_snp_files = [] seq_reader = pyfastaq.sequences.file_reader(query_fasta) for seq in", "subprocess.check_output(f\"rm -f {delta} {delta_1}\", shell=True) maxmatch_opt = \"--maxmatch\" if maxmatch", "-f {delta} {delta_1}\", shell=True) maxmatch_opt = \"--maxmatch\" if maxmatch else", "maxmatch_opt = \"--maxmatch\" if maxmatch else \"\" commands = [", "running show-snps, which runs several other commands # in addition", "maxmatch=maxmatch ) else: tmp_snp_files = [] seq_reader = pyfastaq.sequences.file_reader(query_fasta) for", "== pymummer.variant.INS: # The ref has sequence missing, 
compared to", "split_query=False, debug=False, threads=1, maxmatch=True, ): if not split_query: _run_dnadiff_one_split( ref_fasta,", "records.\"\"\" vcf_records = {} variants = pymummer.snp_file.get_all_variants(snps_file) query_seqs = utils.file_to_dict_of_seqs(query_fasta)", "{query_fasta}\", f\"delta-filter -1 {delta} > {delta_1}\", f\"show-snps -rlTHC {delta_1} >", "We only want the .snps file from the dnadiff script", "# # This is instead of just running show-snps, which", "aligned by mummer. Need to do the appropriate # reverse", "the .snps file from the dnadiff script from MUMmer. From", "in seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\" with open(tmp_fasta,", "# The query has sequence missing, compared to the #", "def _run_dnadiff( ref_fasta, query_fasta, outfile, split_query=False, debug=False, threads=1, maxmatch=True, ):", "variant.var_type == pymummer.variant.DEL: # The query has sequence missing, compared", "[] vcf_records[new_record.CHROM].append(new_record) for vcf_list in vcf_records.values(): vcf_list.sort(key=attrgetter(\"POS\")) with open(outfile, \"w\")", "correct REF and ALT sequences if variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\",", "the correct REF and ALT sequences if variant.reverse: qry_seq =", "show-snps, which runs several other commands # in addition to", "as f: print(seq, file=f) snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta,", "as f_in: shutil.copyfileobj(f_in, f_out) if not debug: os.unlink(snp_file) def _snps_file_to_vcf(snps_file,", "nucleotide before the deletion as well. new_record = vcf_record.VcfRecord( \"\\t\".join(", "ref_fasta, query_fasta, outfile, split_query=False, debug=False, threads=1, maxmatch=True, ): if not", "the dnadiff script from MUMmer. From reading # the docs", "other commands # in addition to making the snps file.", "nucleotide before the insertion as well. 
new_record = vcf_record.VcfRecord( \"\\t\".join(", "= [] seq_reader = pyfastaq.sequences.file_reader(query_fasta) for seq in seq_reader: prefix", "query_seqs[variant.qry_name][variant.qry_start - 1], \".\", \".\", \"SVTYPE=DNADIFF_DEL\", \"GT\", \"1/1\", ] )", "for key, vcf_list in sorted(vcf_records.items()): for record in vcf_list: print(record,", "out.1delta # show-snps -rlTHC out.1delta > out.snps # # This", "outfile, debug=False, split_ref=False, threads=1, maxmatch=True, ): snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff(", "= utils.file_to_dict_of_seqs(query_fasta) for variant in variants: # If the variant", "from varifier import utils # We only want the .snps", "maxmatch else \"\" commands = [ f\"nucmer --threads {threads} {maxmatch_opt}", "(complement) fixes so the VCF has the correct REF and", "The ref has sequence missing, compared to the # query.", "out.delta ref.fasta query.fasta # delta-filter -1 out.delta > out.1delta #", "seq_reader = pyfastaq.sequences.file_reader(query_fasta) for seq in seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\"", "from cluster_vcf_records import vcf_record from varifier import utils # We", "f\"{outfile}.tmp.delta\" delta_1 = f\"{outfile}.tmp.1delta\" subprocess.check_output(f\"rm -f {delta} {delta_1}\", shell=True) maxmatch_opt", "get the nucleotide before the insertion as well. 
new_record =", "f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file, threads=threads, maxmatch=maxmatch ) os.unlink(tmp_fasta) tmp_snp_files.append(snp_file)", "+ 1), \".\", variant.qry_base, variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\",", "want the .snps file from the dnadiff script from MUMmer.", "ref.fasta query.fasta # delta-filter -1 out.delta > out.1delta # show-snps", "as f_out: for snp_file in tmp_snp_files: with open(snp_file, \"rb\") as", "with open(outfile, \"wb\") as f_out: for snp_file in tmp_snp_files: with", "import pyfastaq import pymummer from cluster_vcf_records import vcf_record from varifier", "\".\", variant.qry_base, variant.ref_base, \".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ] )", "So need to # get the nucleotide before the insertion", "deletion as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start),", "compared to the # reference. We're making VCF records w.r.t.", "reverse (complement) fixes so the VCF has the correct REF", "): snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta, snps_file, split_query=split_ref, debug=debug,", "so this is a deletion. So need to # get", "\"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ] ) ) elif variant.var_type == pymummer.variant.DEL:", "query, so this is a deletion. 
So need to #", "seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for key, vcf_list", "subprocess.check_output(command, shell=True) logging.info(\"Finish run command: \" + command) os.unlink(delta) os.unlink(delta_1)", "commands: # # nucmer --maxmatch --delta out.delta ref.fasta query.fasta #", "-rlTHC out.1delta > out.snps # # This is instead of", "f\"{prefix}.fasta\" with open(tmp_fasta, \"w\") as f: print(seq, file=f) snp_file =", "out.1delta > out.snps # # This is instead of just", "print(record, file=f) def make_truth_vcf( ref_fasta, truth_fasta, outfile, debug=False, split_ref=False, threads=1,", "# show-snps -rlTHC out.1delta > out.snps # # This is", "# This is instead of just running show-snps, which runs", "\"1/1\", ] ) ) elif variant.var_type == pymummer.variant.DEL: # The", "this is an insertion. So need to # get the", "commands = [ f\"nucmer --threads {threads} {maxmatch_opt} --delta {delta} {ref_fasta}", "ref has sequence missing, compared to the # query. We're", "\"wb\") as f_out: for snp_file in tmp_snp_files: with open(snp_file, \"rb\")", "to making the snps file. def _run_dnadiff_one_split(ref_fasta, query_fasta, outfile, threads=1,", "ref or query had to be # reverse complemented when", ") elif variant.var_type == pymummer.variant.INS: # The ref has sequence", "in tmp_snp_files: with open(snp_file, \"rb\") as f_in: shutil.copyfileobj(f_in, f_out) if", "and ALT sequences if variant.reverse: qry_seq = pyfastaq.sequences.Fasta(\"x\", variant.qry_base) qry_seq.revcomp()", "print(seq, file=f) snp_file = f\"{prefix}.snps\" _run_dnadiff_one_split( ref_fasta, tmp_fasta, snp_file, threads=threads,", "\"GT\", \"1/1\", ] ) ) elif variant.var_type == pymummer.variant.DEL: #", "seq in seq_reader: prefix = f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\" with", "the .snps file made by dnadiff. 
query_fasta = fasta file", "with open(snp_file, \"rb\") as f_in: shutil.copyfileobj(f_in, f_out) if not debug:", "# We only want the .snps file from the dnadiff", "[ f\"nucmer --threads {threads} {maxmatch_opt} --delta {delta} {ref_fasta} {query_fasta}\", f\"delta-filter", "f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta, snps_file, split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch, )", "either the ref or query had to be # reverse", "file=f) for seq in query_seqs.values(): print(f\"##contig=<ID={seq.id},length={len(seq)}>\", file=f) print(\"#CHROM\\tPOS\\tID\\tREF\\tALT\\tQUAL\\tFILTER\\tINFO\\tFORMAT\\tsample\", file=f) for", "query.fasta # delta-filter -1 out.delta > out.1delta # show-snps -rlTHC", "it means that either the ref or query had to", "pymummer.variant.DEL: # The query has sequence missing, compared to the", "script, we need to run these commands: # # nucmer", "of just running show-snps, which runs several other commands #", "just running show-snps, which runs several other commands # in", "# reference. We're making VCF records w.r.t. the # query,", "\".\", \"SVTYPE=DNADIFF_INS\", \"GT\", \"1/1\", ] ) ) elif variant.var_type ==", "\"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base = ref_seq.seq if", "as well. new_record = vcf_record.VcfRecord( \"\\t\".join( [ variant.qry_name, str(variant.qry_start), \".\",", "open(outfile, \"w\") as f: print(\"##fileformat=VCFv4.2\", file=f) for seq in query_seqs.values():", "maxmatch=True, ): snps_file = f\"{outfile}.tmp.snps\" _run_dnadiff( truth_fasta, ref_fasta, snps_file, split_query=split_ref,", "make_truth_vcf( ref_fasta, truth_fasta, outfile, debug=False, split_ref=False, threads=1, maxmatch=True, ): snps_file", "{threads} {maxmatch_opt} --delta {delta} {ref_fasta} {query_fasta}\", f\"delta-filter -1 {delta} >", "in addition to making the snps file. 
def _run_dnadiff_one_split(ref_fasta, query_fasta,", "= f\"{prefix}.fasta\" with open(tmp_fasta, \"w\") as f: print(seq, file=f) snp_file", "logging import os import shutil import subprocess import pyfastaq import", "w.r.t. the # query, so this is a deletion. So", "= f\"{outfile}.tmp.split.{len(tmp_snp_files)}\" tmp_fasta = f\"{prefix}.fasta\" with open(tmp_fasta, \"w\") as f:", "{maxmatch_opt} --delta {delta} {ref_fasta} {query_fasta}\", f\"delta-filter -1 {delta} > {delta_1}\",", "= \"\".join(reversed(qry_seq.seq)) ref_seq = pyfastaq.sequences.Fasta(\"x\", variant.ref_base) ref_seq.revcomp() variant.ref_base = ref_seq.seq", "split_query=split_ref, debug=debug, threads=threads, maxmatch=maxmatch, ) _snps_file_to_vcf(snps_file, ref_fasta, outfile) if not", "\".\", \".\", \"SVTYPE=DNADIFF_SNP\", \"GT\", \"1/1\", ] ) ) elif variant.var_type" ]
[ "-> Dict[str, Any]: return asdict(self) @dataclass(frozen=True) class Contributor: name: str", "model for various reports\"\"\" @classmethod def key(cls: Type) -> str:", "def to_table(self) -> List[str]: return [ 'Pull requests:', '--------------', 'Open", "+ [f'{c.name.ljust(24)}{c.commit_count}' for c in self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count:", "old_count: int def to_table(self) -> List[str]: return [ 'Pull requests:',", "return asdict(self) @dataclass(frozen=True) class Contributor: name: str commit_count: int @dataclass(frozen=True)", "@dataclass(frozen=True) class Contributor: name: str commit_count: int @dataclass(frozen=True) class ContributorStats(StatsBaseModel):", "int closed_count: int old_count: int def to_table(self) -> List[str]: return", "'Pull requests:', '--------------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class", "for various reports\"\"\" @classmethod def key(cls: Type) -> str: name", "def to_table(self) -> List[str]: return [ 'Issues:', '-------', 'Open Closed", "-> str: name = cls.__name__ return name[0].lower() + name[1:] def", "Any]: return asdict(self) @dataclass(frozen=True) class Contributor: name: str commit_count: int", "class PullRequestStats(StatsBaseModel): open_count: int closed_count: int old_count: int def to_table(self)", "key(cls: Type) -> str: name = cls.__name__ return name[0].lower() +", "(' ' * 20) + 'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}' for", "contributors: List[Contributor] def to_table(self) -> List[str]: return [ 'Most active", "import asdict, dataclass from typing import Any, Dict, List, Type", "class Contributor: name: str commit_count: int @dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors:", "int old_count: int def to_table(self) -> List[str]: return [ 'Pull", "-> List[str]: return [ 'Issues:', '-------', 'Open 
Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}'", "f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count: int closed_count: int old_count:", "@dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors: List[Contributor] def to_table(self) -> List[str]: return", "name[0].lower() + name[1:] def to_table(self) -> List[str]: raise NotImplementedError def", "Type) -> str: name = cls.__name__ return name[0].lower() + name[1:]", "+ (' ' * 20) + 'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}'", "asdict(self) @dataclass(frozen=True) class Contributor: name: str commit_count: int @dataclass(frozen=True) class", "def key(cls: Type) -> str: name = cls.__name__ return name[0].lower()", "in self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count: int closed_count: int old_count:", "+ 'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}' for c in self.contributors] @dataclass(frozen=True)", "for c in self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count: int closed_count:", "List[str]: return [ 'Issues:', '-------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ]", "cls.__name__ return name[0].lower() + name[1:] def to_table(self) -> List[str]: raise", "from dataclasses import asdict, dataclass from typing import Any, Dict,", "class StatsBaseModel: \"\"\"Base model for various reports\"\"\" @classmethod def key(cls:", "int @dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors: List[Contributor] def to_table(self) -> List[str]:", "contributors:', '-------------------------', 'Name' + (' ' * 20) + 'Commits',", "\"\"\"Base model for various reports\"\"\" @classmethod def key(cls: Type) ->", "+ name[1:] def 
to_table(self) -> List[str]: raise NotImplementedError def to_dict(self)", "return name[0].lower() + name[1:] def to_table(self) -> List[str]: raise NotImplementedError", "'Name' + (' ' * 20) + 'Commits', ] +", "[f'{c.name.ljust(24)}{c.commit_count}' for c in self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count: int", "reports\"\"\" @classmethod def key(cls: Type) -> str: name = cls.__name__", "@dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count: int closed_count: int old_count: int def", "'--------------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count:", "int def to_table(self) -> List[str]: return [ 'Issues:', '-------', 'Open", "Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count: int closed_count:", "from typing import Any, Dict, List, Type @dataclass(frozen=True) class StatsBaseModel:", "] @dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count: int closed_count: int old_count: int", "StatsBaseModel: \"\"\"Base model for various reports\"\"\" @classmethod def key(cls: Type)", "various reports\"\"\" @classmethod def key(cls: Type) -> str: name =", "PullRequestStats(StatsBaseModel): open_count: int closed_count: int old_count: int def to_table(self) ->", "20) + 'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}' for c in self.contributors]", "IssueStats(StatsBaseModel): open_count: int closed_count: int old_count: int def to_table(self) ->", "name: str commit_count: int @dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors: List[Contributor] def", "Type @dataclass(frozen=True) class StatsBaseModel: \"\"\"Base model for various reports\"\"\" @classmethod", "to_dict(self) -> Dict[str, Any]: return 
asdict(self) @dataclass(frozen=True) class Contributor: name:", "name[1:] def to_table(self) -> List[str]: raise NotImplementedError def to_dict(self) ->", "Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count: int closed_count: int", "str: name = cls.__name__ return name[0].lower() + name[1:] def to_table(self)", "' * 20) + 'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}' for c", "return [ 'Pull requests:', '--------------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ]", "@dataclass(frozen=True) class StatsBaseModel: \"\"\"Base model for various reports\"\"\" @classmethod def", "Dict, List, Type @dataclass(frozen=True) class StatsBaseModel: \"\"\"Base model for various", "class IssueStats(StatsBaseModel): open_count: int closed_count: int old_count: int def to_table(self)", "int def to_table(self) -> List[str]: return [ 'Pull requests:', '--------------',", "asdict, dataclass from typing import Any, Dict, List, Type @dataclass(frozen=True)", "import Any, Dict, List, Type @dataclass(frozen=True) class StatsBaseModel: \"\"\"Base model", "to_table(self) -> List[str]: return [ 'Issues:', '-------', 'Open Closed Old',", "def to_dict(self) -> Dict[str, Any]: return asdict(self) @dataclass(frozen=True) class Contributor:", "Contributor: name: str commit_count: int @dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors: List[Contributor]", "ContributorStats(StatsBaseModel): contributors: List[Contributor] def to_table(self) -> List[str]: return [ 'Most", "'-------------------------', 'Name' + (' ' * 20) + 'Commits', ]", "dataclasses import asdict, dataclass from typing import Any, Dict, List,", "name = cls.__name__ return name[0].lower() + name[1:] def to_table(self) ->", "= cls.__name__ return name[0].lower() + name[1:] def to_table(self) -> 
List[str]:", "old_count: int def to_table(self) -> List[str]: return [ 'Issues:', '-------',", "[ 'Most active contributors:', '-------------------------', 'Name' + (' ' *", "open_count: int closed_count: int old_count: int def to_table(self) -> List[str]:", "NotImplementedError def to_dict(self) -> Dict[str, Any]: return asdict(self) @dataclass(frozen=True) class", "List[str]: raise NotImplementedError def to_dict(self) -> Dict[str, Any]: return asdict(self)", "-> List[str]: raise NotImplementedError def to_dict(self) -> Dict[str, Any]: return", "to_table(self) -> List[str]: return [ 'Pull requests:', '--------------', 'Open Closed", "-> List[str]: return [ 'Pull requests:', '--------------', 'Open Closed Old',", "<gh_stars>0 from dataclasses import asdict, dataclass from typing import Any,", "closed_count: int old_count: int def to_table(self) -> List[str]: return [", "List, Type @dataclass(frozen=True) class StatsBaseModel: \"\"\"Base model for various reports\"\"\"", "int old_count: int def to_table(self) -> List[str]: return [ 'Issues:',", "self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count: int closed_count: int old_count: int", "active contributors:', '-------------------------', 'Name' + (' ' * 20) +", "def to_table(self) -> List[str]: raise NotImplementedError def to_dict(self) -> Dict[str,", "@classmethod def key(cls: Type) -> str: name = cls.__name__ return", "raise NotImplementedError def to_dict(self) -> Dict[str, Any]: return asdict(self) @dataclass(frozen=True)", "requests:', '--------------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class IssueStats(StatsBaseModel):", "c in self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel): open_count: int closed_count: int", "class ContributorStats(StatsBaseModel): contributors: List[Contributor] def to_table(self) -> List[str]: return [", 
"return [ 'Most active contributors:', '-------------------------', 'Name' + (' '", "List[str]: return [ 'Most active contributors:', '-------------------------', 'Name' + ('", "commit_count: int @dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors: List[Contributor] def to_table(self) ->", "dataclass from typing import Any, Dict, List, Type @dataclass(frozen=True) class", "@dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count: int closed_count: int old_count: int def", "str commit_count: int @dataclass(frozen=True) class ContributorStats(StatsBaseModel): contributors: List[Contributor] def to_table(self)", "List[str]: return [ 'Pull requests:', '--------------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}'", "to_table(self) -> List[str]: raise NotImplementedError def to_dict(self) -> Dict[str, Any]:", "-> List[str]: return [ 'Most active contributors:', '-------------------------', 'Name' +", "to_table(self) -> List[str]: return [ 'Most active contributors:', '-------------------------', 'Name'", "Any, Dict, List, Type @dataclass(frozen=True) class StatsBaseModel: \"\"\"Base model for", "* 20) + 'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}' for c in", "typing import Any, Dict, List, Type @dataclass(frozen=True) class StatsBaseModel: \"\"\"Base", "'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True) class IssueStats(StatsBaseModel): open_count: int", "Dict[str, Any]: return asdict(self) @dataclass(frozen=True) class Contributor: name: str commit_count:", "'Most active contributors:', '-------------------------', 'Name' + (' ' * 20)", "[ 'Pull requests:', '--------------', 'Open Closed Old', f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}' ] @dataclass(frozen=True)", "] + [f'{c.name.ljust(24)}{c.commit_count}' 
for c in self.contributors] @dataclass(frozen=True) class PullRequestStats(StatsBaseModel):", "List[Contributor] def to_table(self) -> List[str]: return [ 'Most active contributors:',", "def to_table(self) -> List[str]: return [ 'Most active contributors:', '-------------------------',", "'Commits', ] + [f'{c.name.ljust(24)}{c.commit_count}' for c in self.contributors] @dataclass(frozen=True) class" ]
[ "datetime_string = self.get(datum, self.field_name) if isinstance(datetime_string, datetime.datetime): to_compare = datetime_string", "any((self.start, self.end)): return None query_params = dict() if self.start: query_params[\"{}__gte\".format(self.field_name)]", "if query_params: return queryset.filter(**query_params) else: return queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc)", "= self.get(datum, self.field_name) if isinstance(datetime_string, datetime.datetime): to_compare = datetime_string else:", "DictFilterMixin, FieldFilter): @property def start(self): return get_start(self.filter_args.get(\"start\")) @property def end(self):", "start_date_str: return None return parse(start_date_str) def get_end(end_date_str): if not end_date_str:", "FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from .queryfilter import QueryFilter WHOLE_DAY = datetime.timedelta(days=1)", "datetime_string else: to_compare = parse(datetime_string) if not self.start and not", "= datetime_string else: to_compare = parse(datetime_string) if not self.start and", "@property def end(self): end_datetime = get_end(self.filter_args.get(\"end\")) if not end_datetime: return", "def query_params(self): if not any((self.start, self.end)): return None query_params =", "False if self.end and (self.end < to_compare): return False return", "= datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str): if not start_date_str:", "dicts)) @property def query_params(self): if not any((self.start, self.end)): return None", "0 and \\ value.second == 0 and \\ value.microsecond ==", "return True return list(filter(in_range, dicts)) @property def query_params(self): if not", "return False if self.start and (to_compare < self.start): return False", "get_start(self.filter_args.get(\"start\")) @property def end(self): end_datetime = get_end(self.filter_args.get(\"end\")) if not end_datetime:", "= 
datetime_data.replace(tzinfo=pytz.utc) return datetime_data def _has_no_time_info(value): return value.hour == 0", "if not end_datetime: return None if _has_no_time_info(end_datetime): end_datetime = end_datetime", "not self.end: return False if self.start and (to_compare < self.start):", "else: return queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def", "self.start and (to_compare < self.start): return False if self.end and", "datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property def start(self): return", "from __future__ import absolute_import import datetime from dateutil import parser", ".queryfilter import QueryFilter WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime')", "datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property", "return value.hour == 0 and \\ value.minute == 0 and", "datetime_data def _has_no_time_info(value): return value.hour == 0 and \\ value.minute", "queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str): if", "datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str): if not start_date_str: return None return parse(start_date_str)", "None return parse(start_date_str) def get_end(end_date_str): if not end_date_str: return None", "== 0 and \\ value.minute == 0 and \\ value.second", "return query_params def _do_django_query(self, queryset): query_params = self.query_params if query_params:", "DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property 
def start(self): return get_start(self.filter_args.get(\"start\")) @property def", "_has_no_time_info(value): return value.hour == 0 and \\ value.minute == 0", "else: to_compare = parse(datetime_string) if not self.start and not self.end:", "and (to_compare < self.start): return False if self.end and (self.end", "return parse(end_date_str) def parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if not", "import absolute_import import datetime from dateutil import parser import pytz", "(to_compare < self.start): return False if self.end and (self.end <", "if self.end and (self.end < to_compare): return False return True", "not start_date_str: return None return parse(start_date_str) def get_end(end_date_str): if not", "end_date_str: return None return parse(end_date_str) def parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def", "def _has_no_time_info(value): return value.hour == 0 and \\ value.minute ==", "False return True return list(filter(in_range, dicts)) @property def query_params(self): if", "__future__ import absolute_import import datetime from dateutil import parser import", "and (self.end < to_compare): return False return True return list(filter(in_range,", "return queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str):", "query_params def _do_django_query(self, queryset): query_params = self.query_params if query_params: return", "return get_start(self.filter_args.get(\"start\")) @property def end(self): end_datetime = get_end(self.filter_args.get(\"end\")) if not", "def get_end(end_date_str): if not end_date_str: return None return parse(end_date_str) def", "value.hour == 0 and \\ value.minute == 0 and \\", "self.field_name) if isinstance(datetime_string, datetime.datetime): to_compare = datetime_string else: to_compare =", "if not start_date_str: 
return None return parse(start_date_str) def get_end(end_date_str): if", "return list(filter(in_range, dicts)) @property def query_params(self): if not any((self.start, self.end)):", "query_params(self): if not any((self.start, self.end)): return None query_params = dict()", "datetime.datetime): to_compare = datetime_string else: to_compare = parse(datetime_string) if not", "< to_compare): return False return True return list(filter(in_range, dicts)) @property", "= self.end return query_params def _do_django_query(self, queryset): query_params = self.query_params", "def _do_django_query(self, queryset): query_params = self.query_params if query_params: return queryset.filter(**query_params)", "import datetime from dateutil import parser import pytz from .base", "to_compare): return False return True return list(filter(in_range, dicts)) @property def", "list(filter(in_range, dicts)) @property def query_params(self): if not any((self.start, self.end)): return", "return None query_params = dict() if self.start: query_params[\"{}__gte\".format(self.field_name)] = self.start", "parse(end_date_str) def parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if not datetime_data.tzinfo:", "WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin,", "end_datetime + WHOLE_DAY - ONE_SECOND return end_datetime def on_dicts(self, dicts):", "+ WHOLE_DAY - ONE_SECOND return end_datetime def on_dicts(self, dicts): def", "end_datetime = end_datetime + WHOLE_DAY - ONE_SECOND return end_datetime def", "and not self.end: return False if self.start and (to_compare <", "= get_end(self.filter_args.get(\"end\")) if not end_datetime: return None if _has_no_time_info(end_datetime): end_datetime", "0 and \\ value.minute == 0 and \\ value.second ==", "self.end)): return None query_params = dict() 
if self.start: query_params[\"{}__gte\".format(self.field_name)] =", "def start(self): return get_start(self.filter_args.get(\"start\")) @property def end(self): end_datetime = get_end(self.filter_args.get(\"end\"))", "ONE_SECOND return end_datetime def on_dicts(self, dicts): def in_range(datum): datetime_string =", "from dateutil import parser import pytz from .base import FieldFilter,", "pytz from .base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from .queryfilter import", "self.query_params if query_params: return queryset.filter(**query_params) else: return queryset.none() min_datetime =", "and \\ value.minute == 0 and \\ value.second == 0", "return None return parse(start_date_str) def get_end(end_date_str): if not end_date_str: return", "return queryset.filter(**query_params) else: return queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime =", "_do_django_query(self, queryset): query_params = self.query_params if query_params: return queryset.filter(**query_params) else:", "import parser import pytz from .base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin", "return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if not datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc)", "dicts): def in_range(datum): datetime_string = self.get(datum, self.field_name) if isinstance(datetime_string, datetime.datetime):", "if not self.start and not self.end: return False if self.start", "self.start): return False if self.end and (self.end < to_compare): return", "min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str): if not", "queryset.filter(**query_params) else: return queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc)", "= dict() if self.start: 
query_params[\"{}__gte\".format(self.field_name)] = self.start if self.end: query_params[\"{}__lte\".format(self.field_name)]", "not end_datetime: return None if _has_no_time_info(end_datetime): end_datetime = end_datetime +", "self.end and (self.end < to_compare): return False return True return", "def end(self): end_datetime = get_end(self.filter_args.get(\"end\")) if not end_datetime: return None", "if not datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc) return datetime_data def _has_no_time_info(value):", "True return list(filter(in_range, dicts)) @property def query_params(self): if not any((self.start,", "get_end(self.filter_args.get(\"end\")) if not end_datetime: return None if _has_no_time_info(end_datetime): end_datetime =", "None if _has_no_time_info(end_datetime): end_datetime = end_datetime + WHOLE_DAY - ONE_SECOND", "on_dicts(self, dicts): def in_range(datum): datetime_string = self.get(datum, self.field_name) if isinstance(datetime_string,", "query_params[\"{}__lte\".format(self.field_name)] = self.end return query_params def _do_django_query(self, queryset): query_params =", "= parse(datetime_string) if not self.start and not self.end: return False", "not end_date_str: return None return parse(end_date_str) def parse(datetime_string): return make_time_aware(parser.parse(datetime_string))", "def parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if not datetime_data.tzinfo: datetime_data", "@property def start(self): return get_start(self.filter_args.get(\"start\")) @property def end(self): end_datetime =", "value.minute == 0 and \\ value.second == 0 and \\", "datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc) return datetime_data def _has_no_time_info(value): return value.hour", "def in_range(datum): datetime_string = self.get(datum, self.field_name) if isinstance(datetime_string, datetime.datetime): to_compare", 
"DjangoQueryFilterMixin from .queryfilter import QueryFilter WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND =", "not datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc) return datetime_data def _has_no_time_info(value): return", "return parse(start_date_str) def get_end(end_date_str): if not end_date_str: return None return", "def make_time_aware(datetime_data): if not datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc) return datetime_data", "end_datetime = get_end(self.filter_args.get(\"end\")) if not end_datetime: return None if _has_no_time_info(end_datetime):", "if not any((self.start, self.end)): return None query_params = dict() if", "not self.start and not self.end: return False if self.start and", "parse(datetime_string) if not self.start and not self.end: return False if", "return False return True return list(filter(in_range, dicts)) @property def query_params(self):", "isinstance(datetime_string, datetime.datetime): to_compare = datetime_string else: to_compare = parse(datetime_string) if", ".base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from .queryfilter import QueryFilter WHOLE_DAY", "self.start and not self.end: return False if self.start and (to_compare", "if not end_date_str: return None return parse(end_date_str) def parse(datetime_string): return", "ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property def", "if self.start and (to_compare < self.start): return False if self.end", "= datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property def start(self):", "False if self.start and (to_compare < self.start): return False if", "datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def 
get_start(start_date_str): if not start_date_str: return", "= self.start if self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end return query_params def", "self.end: return False if self.start and (to_compare < self.start): return", "query_params = self.query_params if query_params: return queryset.filter(**query_params) else: return queryset.none()", "= end_datetime + WHOLE_DAY - ONE_SECOND return end_datetime def on_dicts(self,", "query_params = dict() if self.start: query_params[\"{}__gte\".format(self.field_name)] = self.start if self.end:", "if _has_no_time_info(end_datetime): end_datetime = end_datetime + WHOLE_DAY - ONE_SECOND return", "get_start(start_date_str): if not start_date_str: return None return parse(start_date_str) def get_end(end_date_str):", "< self.start): return False if self.end and (self.end < to_compare):", "return datetime_data def _has_no_time_info(value): return value.hour == 0 and \\", "import pytz from .base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from .queryfilter", "@property def query_params(self): if not any((self.start, self.end)): return None query_params", "make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if not datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc) return", "import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from .queryfilter import QueryFilter WHOLE_DAY =", "not any((self.start, self.end)): return None query_params = dict() if self.start:", "end_datetime: return None if _has_no_time_info(end_datetime): end_datetime = end_datetime + WHOLE_DAY", "QueryFilter WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin,", "= datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, 
DictFilterMixin, FieldFilter):", "absolute_import import datetime from dateutil import parser import pytz from", "return None return parse(end_date_str) def parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data):", "self.end return query_params def _do_django_query(self, queryset): query_params = self.query_params if", "WHOLE_DAY - ONE_SECOND return end_datetime def on_dicts(self, dicts): def in_range(datum):", "def get_start(start_date_str): if not start_date_str: return None return parse(start_date_str) def", "@QueryFilter.register_type_condition('datetime') class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property def start(self): return get_start(self.filter_args.get(\"start\"))", "datetime from dateutil import parser import pytz from .base import", "dateutil import parser import pytz from .base import FieldFilter, DictFilterMixin,", "datetime_data = datetime_data.replace(tzinfo=pytz.utc) return datetime_data def _has_no_time_info(value): return value.hour ==", "DictFilterMixin, DjangoQueryFilterMixin from .queryfilter import QueryFilter WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND", "None return parse(end_date_str) def parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if", "datetime_data.replace(tzinfo=pytz.utc) return datetime_data def _has_no_time_info(value): return value.hour == 0 and", "get_end(end_date_str): if not end_date_str: return None return parse(end_date_str) def parse(datetime_string):", "from .queryfilter import QueryFilter WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1)", "self.get(datum, self.field_name) if isinstance(datetime_string, datetime.datetime): to_compare = datetime_string else: to_compare", "dict() if self.start: query_params[\"{}__gte\".format(self.field_name)] = self.start if self.end: query_params[\"{}__lte\".format(self.field_name)] =", 
"None query_params = dict() if self.start: query_params[\"{}__gte\".format(self.field_name)] = self.start if", "\\ value.minute == 0 and \\ value.second == 0 and", "= self.query_params if query_params: return queryset.filter(**query_params) else: return queryset.none() min_datetime", "(self.end < to_compare): return False return True return list(filter(in_range, dicts))", "self.start: query_params[\"{}__gte\".format(self.field_name)] = self.start if self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end return", "= datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str): if not start_date_str: return None return", "end_datetime def on_dicts(self, dicts): def in_range(datum): datetime_string = self.get(datum, self.field_name)", "if isinstance(datetime_string, datetime.datetime): to_compare = datetime_string else: to_compare = parse(datetime_string)", "end(self): end_datetime = get_end(self.filter_args.get(\"end\")) if not end_datetime: return None if", "in_range(datum): datetime_string = self.get(datum, self.field_name) if isinstance(datetime_string, datetime.datetime): to_compare =", "== 0 and \\ value.second == 0 and \\ value.microsecond", "FieldFilter): @property def start(self): return get_start(self.filter_args.get(\"start\")) @property def end(self): end_datetime", "return end_datetime def on_dicts(self, dicts): def in_range(datum): datetime_string = self.get(datum,", "queryset): query_params = self.query_params if query_params: return queryset.filter(**query_params) else: return", "return False if self.end and (self.end < to_compare): return False", "import QueryFilter WHOLE_DAY = datetime.timedelta(days=1) ONE_SECOND = datetime.timedelta(seconds=1) @QueryFilter.register_type_condition('datetime') class", "if self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end return query_params def _do_django_query(self, queryset):", "return None if _has_no_time_info(end_datetime): end_datetime = end_datetime + 
WHOLE_DAY -", "self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end return query_params def _do_django_query(self, queryset): query_params", "- ONE_SECOND return end_datetime def on_dicts(self, dicts): def in_range(datum): datetime_string", "from .base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from .queryfilter import QueryFilter", "max_datetime = datetime.datetime.max.replace(tzinfo=pytz.utc) def get_start(start_date_str): if not start_date_str: return None", "and \\ value.second == 0 and \\ value.microsecond == 0", "self.start if self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end return query_params def _do_django_query(self,", "class DatetimeRangeFilter(DjangoQueryFilterMixin, DictFilterMixin, FieldFilter): @property def start(self): return get_start(self.filter_args.get(\"start\")) @property", "to_compare = datetime_string else: to_compare = parse(datetime_string) if not self.start", "parse(datetime_string): return make_time_aware(parser.parse(datetime_string)) def make_time_aware(datetime_data): if not datetime_data.tzinfo: datetime_data =", "start(self): return get_start(self.filter_args.get(\"start\")) @property def end(self): end_datetime = get_end(self.filter_args.get(\"end\")) if", "if self.start: query_params[\"{}__gte\".format(self.field_name)] = self.start if self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end", "def on_dicts(self, dicts): def in_range(datum): datetime_string = self.get(datum, self.field_name) if", "query_params[\"{}__gte\".format(self.field_name)] = self.start if self.end: query_params[\"{}__lte\".format(self.field_name)] = self.end return query_params", "parser import pytz from .base import FieldFilter, DictFilterMixin, DjangoQueryFilterMixin from", "query_params: return queryset.filter(**query_params) else: return queryset.none() min_datetime = datetime.datetime.min.replace(tzinfo=pytz.utc) max_datetime", "parse(start_date_str) def get_end(end_date_str): if 
not end_date_str: return None return parse(end_date_str)", "to_compare = parse(datetime_string) if not self.start and not self.end: return", "_has_no_time_info(end_datetime): end_datetime = end_datetime + WHOLE_DAY - ONE_SECOND return end_datetime", "make_time_aware(datetime_data): if not datetime_data.tzinfo: datetime_data = datetime_data.replace(tzinfo=pytz.utc) return datetime_data def" ]
[ "= reduction self.loss_weight = loss_weight if self.use_sigmoid: self.loss_function = binary_cross_entropy", "diff * diff / beta, diff - 0.5 * beta)", "0 diff = torch.abs(pred - target) loss = torch.where(diff <", "torch.nonzero(labels >= 1).squeeze() if inds.numel() > 0: bin_labels[inds, labels[inds] -", "with reduction=\"sum\"') return loss def reduce_loss(loss, reduction): \"\"\"Reduce loss compute.", "= partial(func, **kwargs) if kwargs else func map_results = map(pfunc,", "= [] for i in range(num_levels): anchor_stride = self.anchor_strides[i] feat_h,", "# This program is distributed in the hope that it", "use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init Cross Entropy loss. :param desc: config", "'sum') reduction = ( reduction_override if reduction_override else self.reduction) loss_cls", "be used with reduction=\"sum\"') return loss def reduce_loss(loss, reduction): \"\"\"Reduce", "[x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore", "inds.numel() > 0: bin_labels[inds, labels[inds] - 1] = 1 if", "= weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss def mask_cross_entropy(pred, target, label,", "beta > 0 assert pred.size() == target.size() and target.numel() >", "= x[2][0] bbox_preds = x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda() img_metas =", "x losses_cls, losses_bbox = multi_apply(self.loss, cls_score, bbox_pred, labels, label_weights, bbox_targets,", "WITHOUT ANY WARRANTY; without even the implied warranty of #", "the MIT License. 
# This program is distributed in the", "= img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h / anchor_stride)), feat_h) valid_feat_w =", "loss_weight=1.0): \"\"\"Init smooth l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta = beta", "\"\"\" loss = F.cross_entropy(pred, label, reduction='none') if weight is not", "labels.new_full((labels.size(0), label_channels), 0) inds = torch.nonzero(labels >= 1).squeeze() if inds.numel()", "self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module):", "License for more details. \"\"\"Import all torch operators.\"\"\" import torch.nn.functional", "License. # This program is distributed in the hope that", "self.anchor_ratios = anchor_ratios or [0.5, 1.0, 2.0] self.anchor_strides = anchor_strides", "= x losses_cls, losses_bbox = multi_apply(self.loss, cls_score, bbox_pred, labels, label_weights,", "Ltd. All rights reserved. 
# This program is free software;", "label and label weights \"\"\" bin_labels = labels.new_full((labels.size(0), label_channels), 0)", "\"\"\"Cross Entropy Loss.\"\"\" def __init__(self, use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init", "weight.float() loss = F.binary_cross_entropy_with_logits( pred, label.float(), weight, reduction='none') loss =", "self.anchor_base_sizes_cfg is None else self.anchor_base_sizes_cfg super(Anchors, self).__init__() def forward(self, x):", "Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou': 0.3,", "self.anchor_ratios)) num_imgs = len(img_metas) num_levels = len(featmap_sizes) multi_level_anchors = []", "import NetworkFactory from vega.search_space.networks.net_utils import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget", "multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore def", "return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): \"\"\"Weight", "num_rois = pred.size()[0] inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device) pred_slice", "= [] for i in range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i])", "Copyright (C) 2020. Huawei Technologies Co., Ltd. 
All rights reserved.", "if weight is not None: loss = loss * weight", "super(RpnLossInput, self).__init__() def forward(self, x): \"\"\"Get cls score.\"\"\" cls_scores =", "label.float(), weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss", "None num_rois = pred.size()[0] inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device)", "reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss def mask_cross_entropy(pred,", "beta) return loss def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Cross", ":param avg_factor: avg factor :return: loss \"\"\" if weight is", "function :param avg_factor: avg factor :return: loss \"\"\" if weight", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): \"\"\"Weight reduce loss. :param loss: losses", "if target.numel() > 0: loss_bbox = self.loss_weight * smooth_l1_loss( pred,", "anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors for _ in range(num_imgs)]", "weight: weight :param avg_factor: avg factor :param reduction_override: reduce override", "1).expand(label_weights.size(0), label_channels) return bin_labels, bin_label_weights def binary_cross_entropy(pred, label, weight=None, reduction='mean',", "losses_cls, losses_bbox = multi_apply(self.loss, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights,", "avg_factor=avg_factor) return loss def mask_cross_entropy(pred, target, label, reduction='mean', avg_factor=None): \"\"\"Mask", "for img_id, img_meta in enumerate(img_metas): multi_level_flags = [] for i", "if reduction == 'mean': loss = loss.sum() / avg_factor elif", "= x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore 
@NetworkFactory.register(NetTypes.Operator) class", "the # MIT License for more details. \"\"\"Import all torch", "\"\"\"Get x.\"\"\" (cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_pos, num_total_neg,", "featmap_sizes[i] h, w, _ = img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h /", "= target_means or (.0, .0, .0, .0) self.target_stds = target_stds", "1 self.cfg = Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1,", "[8, 16, 32] self.anchor_ratios = anchor_ratios or [0.5, 1.0, 2.0]", "gt_bboxes_ignore = x featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]", ":return: loss \"\"\" assert beta > 0 assert pred.size() ==", "diff / beta, diff - 0.5 * beta) return loss", "bbox_targets, bbox_weights, num_total_samples=num_total_samples) return losses_cls, losses_bbox def loss(self, cls_score, bbox_pred,", "inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device) pred_slice = pred[inds, label].squeeze(1)", "weight=None, reduction='mean', avg_factor=None): \"\"\"Cross entropy losses. :param pred: predict result", "= x[0] bbox_preds = x[1] return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class", "= binary_cross_entropy elif self.use_mask: self.loss_function = mask_cross_entropy else: self.loss_function =", "class RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\" def __init__(self, out_channels=2): super(RpnClsLoss, self).__init__()", "torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth l1 loss. 
:param", "3, 1).reshape(-1, 4) loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples) return", "is None: loss = reduce_loss(loss, reduction) else: if reduction ==", "avg_factor elif reduction != 'none': raise ValueError('avg_factor can not be", "override :return: loss \"\"\" reduction = ( reduction_override if reduction_override", "'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5}, 'sampler':", "False}) self.sampling = sampling super(AnchorTargetOp, self).__init__() def forward(self, x): \"\"\"Create", "valid_flag_list, gt_bboxes, img_metas, self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling)", "range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors for", "return bin_labels, bin_label_weights def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Binary", "= [x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds, gt_bboxes, img_metas,", "None else self.anchor_base_sizes_cfg super(Anchors, self).__init__() def forward(self, x): \"\"\"Create anchor.\"\"\"", "beta :return: loss \"\"\" assert beta > 0 assert pred.size()", "len(featmap_sizes) multi_level_anchors = [] for i in range(num_levels): anchors =", "*args, **kwargs): \"\"\"Multi apply. 
:param func: function :param args: args", "3, 1).reshape(-1, self.out_channels) loss_cls = self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples) bbox_targets", "bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples) return losses_cls, losses_bbox def", "self.out_channels = out_channels def forward(self, x): \"\"\"Get x.\"\"\" (cls_score, bbox_pred,", "= x[0]['gt_bboxes'].cuda() img_metas = [x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return cls_scores,", "2, 3, 1).reshape(-1, 4) loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples)", "and label weights \"\"\" bin_labels = labels.new_full((labels.size(0), label_channels), 0) inds", "anchor_list = [multi_level_anchors for _ in range(num_imgs)] valid_flag_list = []", "- 1] = 1 if label_weights is None: bin_label_weights =", "avg_factor=None): \"\"\"Weight reduce loss. :param loss: losses :param weight: weight", "\"\"\"Reduce loss compute. 
:param loss: losses :param reduction: reduce funtion", "'mean' and avg_factor is None num_rois = pred.size()[0] inds =", "else 1 self.cfg = Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr':", "from functools import partial import numpy as np from six.moves", "0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals': False}, 'allowed_border': 0, 'pos_weight': -1, 'debug':", "def __init__(self, target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means =", "= CustomSmoothL1Loss() self.out_channels = out_channels def forward(self, x): \"\"\"Get x.\"\"\"", "return losses_cls, losses_bbox def loss(self, cls_score, bbox_pred, labels, label_weights, bbox_targets,", "cls_score = cls_score.permute(0, 2, 3, 1).reshape(-1, self.out_channels) loss_cls = self.loss_cls(cls_score,", "bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def", "feat_h, feat_w = featmap_sizes[i] h, w, _ = img_meta['pad_shape'] valid_feat_h", "\"\"\"Init smooth l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta = beta self.reduction", "factor :param reduction_override: reduce override :return: loss \"\"\" reduction =", "avg factor :return: loss \"\"\" loss = F.cross_entropy(pred, label, reduction='none')", "= labels.new_full((labels.size(0), label_channels), 0) inds = torch.nonzero(labels >= 1).squeeze() if", "label.dim(): label, weight = _expand_binary_labels(label, weight, pred.size(-1)) if weight is", "loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss def mask_cross_entropy(pred, target,", "[0.5, 1.0, 2.0] self.anchor_strides = anchor_strides or [4, 8, 16,", "self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss() self.out_channels = out_channels def", "self.use_mask: self.loss_function = mask_cross_entropy else: self.loss_function = 
cross_entropy def forward(self,", "else self.anchor_base_sizes_cfg super(Anchors, self).__init__() def forward(self, x): \"\"\"Create anchor.\"\"\" cls_scores,", "(None, 'none', 'mean', 'sum') reduction = ( reduction_override if reduction_override", "loss = torch.where(diff < beta, 0.5 * diff * diff", "= labels.reshape(-1) label_weights = label_weights.reshape(-1) cls_score = cls_score.permute(0, 2, 3,", "= label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return bin_labels, bin_label_weights def binary_cross_entropy(pred, label,", "== 'mean': loss = loss.sum() / avg_factor elif reduction !=", "l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta = beta self.reduction = reduction", "'pos_weight': -1, 'debug': False}) self.sampling = sampling super(AnchorTargetOp, self).__init__() def", "1).reshape(-1, 4) loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples) return loss_cls,", "bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights,", "loss def reduce_loss(loss, reduction): \"\"\"Reduce loss compute. :param loss: losses", "avg_factor=None): \"\"\"Mask cross entropy loss. :param pred: predict result :param", "weight is not None: loss = loss * weight if", "forward(self, x): \"\"\"Get cls score and bbox preds.\"\"\" cls_scores =", "bbox_preds = x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda() img_metas = [x[0]['img_meta']] gt_bboxes_ignore", "\"\"\"Get loss.\"\"\" labels = labels.reshape(-1) label_weights = label_weights.reshape(-1) cls_score =", "2, 3, 1).reshape(-1, self.out_channels) loss_cls = self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples)", "loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples) return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator)", "loss_weight=1.0): \"\"\"Init Cross Entropy loss. 
:param desc: config dict \"\"\"", "target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means = target_means or (.0,", "torch from vega.search_space.networks.network_factory import NetworkFactory from vega.search_space.networks.net_utils import NetTypes from", "= F.binary_cross_entropy_with_logits( pred, label.float(), weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction,", "cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\" labels", "label_weights.reshape(-1) cls_score = cls_score.permute(0, 2, 3, 1).reshape(-1, self.out_channels) loss_cls =", "[] for img_id, img_meta in enumerate(img_metas): multi_level_flags = [] for", "1.0, 1.0, 1.0) self.label_channels = num_classes if use_sigmoid_cls else 1", ":param reduction_override: reduce override :return: loss \"\"\" reduction = (", "weight = weight.float() loss = F.binary_cross_entropy_with_logits( pred, label.float(), weight, reduction='none')", "anchor_scales or [8, 16, 32] self.anchor_ratios = anchor_ratios or [0.5,", "not None: loss = loss * weight if avg_factor is", "@NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\" def __init__(self, out_channels=2): super(RpnClsLoss,", "function :param args: args of function :return: result \"\"\" pfunc", "bbox_targets, bbox_weights, avg_factor=num_total_samples) return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross", "return loss def reduce_loss(loss, reduction): \"\"\"Reduce loss compute. 
:param loss:", "Entropy Loss.\"\"\" def __init__(self, use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init Cross", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR", ":param target: target :param label: gt label :param reduction: reduce", "32, 64] self.anchor_base_sizes = list( self.anchor_strides) if self.anchor_base_sizes_cfg is None", "smooth l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta = beta self.reduction =", "loss def _expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand binary labels. :param labels:", "x[0] bbox_preds = x[1] return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module):", "predict :param target: target :param beta: beta :return: loss \"\"\"", ":param pred: predict result :param label: gt label :param weight:", "is not None: weight = weight.float() loss = F.binary_cross_entropy_with_logits( pred,", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "for i in range(num_levels): anchor_stride = self.anchor_strides[i] feat_h, feat_w =", "gt_bboxes = x[0]['gt_bboxes'].cuda() img_metas = [x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return", "= num_classes if use_sigmoid_cls else 1 self.cfg = Config({'assigner': {'name':", "target :param label: gt label :param reduction: reduce function :param", "it under the terms of the MIT License. 
# This", ":param func: function :param args: args of function :return: result", "else: if reduction == 'mean': loss = loss.sum() / avg_factor", "loss \"\"\" if pred.dim() != label.dim(): label, weight = _expand_binary_labels(label,", "= reduction self.loss_weight = loss_weight def forward(self, pred, target, weight=None,", "target: target :param weight: weight :param avg_factor: avg factor :param", "anchors according to feature map sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None,", "reduction='mean', loss_weight=1.0): \"\"\"Init Cross Entropy loss. :param desc: config dict", "pfunc = partial(func, **kwargs) if kwargs else func map_results =", "factor :return: loss \"\"\" if weight is not None: loss", "avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward compute. :param pred: predict :param target:", "import partial import numpy as np from six.moves import map,", "= torch.where(diff < beta, 0.5 * diff * diff /", "sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg", "reduce loss. :param loss: losses :param weight: weight :param reduction:", "can redistribute it and/or modify # it under the terms", "CustomSmoothL1Loss() self.out_channels = out_channels def forward(self, x): \"\"\"Get x.\"\"\" (cls_score,", "or (1.0, 1.0, 1.0, 1.0) self.label_channels = num_classes if use_sigmoid_cls", "= [multi_level_anchors for _ in range(num_imgs)] valid_flag_list = [] for", "anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore def multi_apply(func, *args, **kwargs):", "gt_bboxes_ignore def multi_apply(func, *args, **kwargs): \"\"\"Multi apply. :param func: function", "weight: weight :param reduction: reduce function :param avg_factor: avg factor", "gt_bboxes, img_metas, gt_bboxes_ignore def multi_apply(func, *args, **kwargs): \"\"\"Multi apply. 
:param", "self.loss_function = binary_cross_entropy elif self.use_mask: self.loss_function = mask_cross_entropy else: self.loss_function", "* weight if avg_factor is None: loss = reduce_loss(loss, reduction)", "beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox else: return torch.FloatTensor([0.0]).cuda() @weighted_loss", ":return: loss \"\"\" loss = F.cross_entropy(pred, label, reduction='none') if weight", "target) loss = torch.where(diff < beta, 0.5 * diff *", "num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means = target_means or (.0, .0,", "__init__(self): super(RpnLossInput, self).__init__() def forward(self, x): \"\"\"Get cls score.\"\"\" cls_scores", "\"\"\"Binary cross entropy loss. :param pred: predict result :param label:", "bbox_preds = x[1] return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn", "gt_bboxes, img_metas, self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator)", "img_id, img_meta in enumerate(img_metas): multi_level_flags = [] for i in", "bin_label_weights = None else: bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return", "= cls_score.permute(0, 2, 3, 1).reshape(-1, self.out_channels) loss_cls = self.loss_cls(cls_score, labels,", "import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator", "reduction_override else self.reduction) loss_cls = self.loss_weight * self.loss_function(cls_score, label, weight,", "reserved. # This program is free software; you can redistribute", "cross entropy loss. 
:param pred: predict result :param target: target", "@NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy Loss.\"\"\" def __init__(self, use_sigmoid=False, use_mask=False,", "avg_factor=avg_factor, **kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\"", "result :param target: target :param label: gt label :param reduction:", "weight=None, reduction='mean', avg_factor=None): \"\"\"Binary cross entropy loss. :param pred: predict", "factor :return: loss \"\"\" loss = F.cross_entropy(pred, label, reduction='none') if", "= label_weights.reshape(-1) cls_score = cls_score.permute(0, 2, 3, 1).reshape(-1, self.out_channels) loss_cls", "labels[inds] - 1] = 1 if label_weights is None: bin_label_weights", "'mean': loss = loss.sum() / avg_factor elif reduction != 'none':", "else func map_results = map(pfunc, *args) return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator)", "reduction: reduce funtion :return: loss \"\"\" reduction_function = F._Reduction.get_enum(reduction) if", "loss_bbox = self.loss_weight * smooth_l1_loss( pred, target, weight, beta=self.beta, reduction=reduction,", ":return: binary label and label weights \"\"\" bin_labels = labels.new_full((labels.size(0),", "= x[1] return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn loss", "1.0, 1.0) self.label_channels = num_classes if use_sigmoid_cls else 1 self.cfg", "x): \"\"\"Create anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore = x", "reduction_override if reduction_override else self.reduction) loss_cls = self.loss_weight * self.loss_function(cls_score,", "weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss def", "4) bbox_weights = bbox_weights.reshape(-1, 4) bbox_pred = bbox_pred.permute(0, 2, 3,", 
"_ = img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h / anchor_stride)), feat_h) valid_feat_w", "and bbox preds.\"\"\" cls_scores = x[0] bbox_preds = x[1] return", "x # out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return AnchorTarget(anchor_list, valid_flag_list,", "self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1, 4) bbox_weights =", "reduction == 'mean' and avg_factor is None num_rois = pred.size()[0]", "label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples) return losses_cls, losses_bbox def loss(self, cls_score,", ":param pred: predict :param target: target :param beta: beta :return:", "'RandomSampler', 'num': 256, 'pos_fraction': 0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals': False}, 'allowed_border':", "label channels :return: binary label and label weights \"\"\" bin_labels", "if weight is not None: weight = weight.float() loss =", "cross entropy loss. 
:param pred: predict result :param label: gt", "img_metas, gt_bboxes_ignore = x # out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg).", "reduction='none') if weight is not None: weight = weight.float() loss", "bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\" labels =", "return loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\" def __init__(self,", "return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\" def", "import torch from vega.search_space.networks.network_factory import NetworkFactory from vega.search_space.networks.net_utils import NetTypes", "None: bin_label_weights = None else: bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0), label_channels)", "valid_flag_list, original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore = x # out=(labels_list, label_weights_list,", "= Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou':", "class RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\" def __init__(self): super(RpnLossInput, self).__init__() def", "NetworkFactory from vega.search_space.networks.net_utils import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from", "= bbox_targets.reshape(-1, 4) bbox_weights = bbox_weights.reshape(-1, 4) bbox_pred = bbox_pred.permute(0,", "avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1, 4) bbox_weights = bbox_weights.reshape(-1, 4) bbox_pred", "**kwargs): \"\"\"Forward compute.\"\"\" assert reduction_override in (None, 'none', 'mean', 'sum')", "return cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor", 
":param reduction: reduce function :param avg_factor: avg factor :return: loss", "\"\"\"Rpn loss input.\"\"\" def __init__(self): super(RpnLossInput, self).__init__() def forward(self, x):", "= bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) loss_bbox = self.loss_bbox(bbox_pred, bbox_targets,", "device=pred.device) pred_slice = pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def", ":return: loss \"\"\" if pred.dim() != label.dim(): label, weight =", "# it under the terms of the MIT License. #", "\"\"\" assert beta > 0 assert pred.size() == target.size() and", "reduction='mean')[None] def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): \"\"\"Weight reduce loss. :param", "result \"\"\" pfunc = partial(func, **kwargs) if kwargs else func", "anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales = anchor_scales or [8, 16,", "img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h / anchor_stride)), feat_h) valid_feat_w = min(int(np.ceil(w", "-*- coding: utf-8 -*- # Copyright (C) 2020. 
Huawei Technologies", "or [4, 8, 16, 32, 64] self.anchor_base_sizes = list( self.anchor_strides)", "self.reduction = reduction self.loss_weight = loss_weight if self.use_sigmoid: self.loss_function =", "bin_labels, bin_label_weights def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Binary cross", "'allowed_border': 0, 'pos_weight': -1, 'debug': False}) self.sampling = sampling super(AnchorTargetOp,", "featmap in cls_scores] anchor_generators = [] for anchor_base in self.anchor_base_sizes:", "def forward(self, x): \"\"\"Get cls score and bbox preds.\"\"\" cls_scores", "None else: bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return bin_labels, bin_label_weights", "loss \"\"\" reduction = ( reduction_override if reduction_override else self.reduction)", "weight :param avg_factor: avg factor :param reduction_override: reduce override :return:", "__init__(self, target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means = target_means", "out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas,", "for i in range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list", "predict :param target: target :param weight: weight :param avg_factor: avg", "- 0.5 * beta) return loss def cross_entropy(pred, label, weight=None,", "\"\"\"Create anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore = x featmap_sizes", "This program is free software; you can redistribute it and/or", "* diff / beta, diff - 0.5 * beta) return", "reduction): \"\"\"Reduce loss compute. 
:param loss: losses :param reduction: reduce", "labels :param label_weights: label weights :param label_channels: label channels :return:", "beta self.reduction = reduction self.loss_weight = loss_weight def forward(self, pred,", "rights reserved. # This program is free software; you can", "return loss def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Cross entropy", "self.use_sigmoid = use_sigmoid self.use_mask = use_mask self.reduction = reduction self.loss_weight", "gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get anchors according to", "num_total_samples): \"\"\"Get loss.\"\"\" labels = labels.reshape(-1) label_weights = label_weights.reshape(-1) cls_score", "> 0 diff = torch.abs(pred - target) loss = torch.where(diff", "self.cfg = Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1, 0.3]),", "= None else: bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return bin_labels,", "# -*- coding: utf-8 -*- # Copyright (C) 2020. Huawei", "0.7, 'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5}, 'sampler': {'name':", "= use_sigmoid self.use_mask = use_mask self.reduction = reduction self.loss_weight =", "= len(featmap_sizes) multi_level_anchors = [] for i in range(num_levels): anchors", "0: return loss elif reduction_function == 1: return loss.mean() elif", "\"\"\"Cross entropy losses. 
:param pred: predict result :param label: gt", "if self.anchor_base_sizes_cfg is None else self.anchor_base_sizes_cfg super(Anchors, self).__init__() def forward(self,", "weight if avg_factor is None: loss = reduce_loss(loss, reduction) else:", "\"\"\" pfunc = partial(func, **kwargs) if kwargs else func map_results", "CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy Loss.\"\"\" def __init__(self, use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0):", "labels, label_weights, bbox_targets, bbox_weights, num_total_pos, num_total_neg, num_total_samples) = x losses_cls,", "loss.\"\"\" labels = labels.reshape(-1) label_weights = label_weights.reshape(-1) cls_score = cls_score.permute(0,", "beta=1.0): \"\"\"Smooth l1 loss. :param pred: predict :param target: target", "= torch.abs(pred - target) loss = torch.where(diff < beta, 0.5", "tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\" def __init__(self,", "func map_results = map(pfunc, *args) return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class", "self.anchor_strides = anchor_strides or [4, 8, 16, 32, 64] self.anchor_base_sizes", "16, 32, 64] self.anchor_base_sizes = list( self.anchor_strides) if self.anchor_base_sizes_cfg is", "self.anchor_base_sizes = list( self.anchor_strides) if self.anchor_base_sizes_cfg is None else self.anchor_base_sizes_cfg", "else self.reduction) if target.numel() > 0: loss_bbox = self.loss_weight *", "target_means or (.0, .0, .0, .0) self.target_stds = target_stds or", "from vega.search_space.networks.net_utils import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator", "**kwargs) if kwargs else func map_results = map(pfunc, *args) return", "label_weights, label_channels): \"\"\"Expand binary labels. 
:param labels: labels :param label_weights:", "used with reduction=\"sum\"') return loss def reduce_loss(loss, reduction): \"\"\"Reduce loss", "if reduction_override else self.reduction) if target.numel() > 0: loss_bbox =", "pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def weight_reduce_loss(loss, weight=None, reduction='mean',", "x[0]['gt_bboxes'].cuda() img_metas = [x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds,", "avg_factor: avg factor :return: loss \"\"\" if weight is not", "from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from vega.core.common.config import Config from functools", "(C) 2020. Huawei Technologies Co., Ltd. All rights reserved. #", "more details. \"\"\"Import all torch operators.\"\"\" import torch.nn.functional as F", "(1.0, 1.0, 1.0, 1.0) self.label_channels = num_classes if use_sigmoid_cls else", "bbox_weights_list, num_total_pos,num_total_neg). 
return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas, self.target_means, self.target_stds, self.cfg,", "numpy as np from six.moves import map, zip from vega.search_space.networks.pytorch.losses.reduce_loss", "in range(num_imgs)] valid_flag_list = [] for img_id, img_meta in enumerate(img_metas):", ":param args: args of function :return: result \"\"\" pfunc =", "and/or modify # it under the terms of the MIT", "@NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\" def __init__(self, beta=1.0, reduction='mean',", "assert beta > 0 assert pred.size() == target.size() and target.numel()", "avg factor :return: loss \"\"\" assert reduction == 'mean' and", "from vega.search_space.networks.network_factory import NetworkFactory from vega.search_space.networks.net_utils import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target", "0) inds = torch.nonzero(labels >= 1).squeeze() if inds.numel() > 0:", "labels = labels.reshape(-1) label_weights = label_weights.reshape(-1) cls_score = cls_score.permute(0, 2,", "anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales = anchor_scales", "binary_cross_entropy elif self.use_mask: self.loss_function = mask_cross_entropy else: self.loss_function = cross_entropy", "modify # it under the terms of the MIT License.", "reduction='mean', avg_factor=None): \"\"\"Mask cross entropy loss. :param pred: predict result", "0 assert pred.size() == target.size() and target.numel() > 0 diff", "cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\"", ":param avg_factor: avg factor :return: loss \"\"\" assert reduction ==", "reduction='mean', avg_factor=None): \"\"\"Cross entropy losses. 
:param pred: predict result :param", "self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors for _ in range(num_imgs)] valid_flag_list", "None: loss = reduce_loss(loss, reduction) else: if reduction == 'mean':", "anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore = x featmap_sizes =", "X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore = x #", "reduction=reduction, avg_factor=avg_factor) return loss def _expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand binary", "use_sigmoid self.use_mask = use_mask self.reduction = reduction self.loss_weight = loss_weight", "anchor_generators = [] for anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios))", "loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\" def __init__(self, beta=1.0,", "feat_h) valid_feat_w = min(int(np.ceil(w / anchor_stride)), feat_w) flags = anchor_generators[i].valid_flags((feat_h,", "self.use_mask = use_mask self.reduction = reduction self.loss_weight = loss_weight if", "target_stds or (1.0, 1.0, 1.0, 1.0) self.label_channels = num_classes if", "sampling super(AnchorTargetOp, self).__init__() def forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list,", "= self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples) return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class", "'ignore_iof_thr': 0.5}, 'sampler': {'name': 'RandomSampler', 'num': 256, 'pos_fraction': 0.5, 'neg_pos_ub':", "class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy Loss.\"\"\" def __init__(self, use_sigmoid=False, use_mask=False, reduction='mean',", "featmap_sizes = [featmap.size()[-2:] for 
featmap in cls_scores] anchor_generators = []", "= min(int(np.ceil(w / anchor_stride)), feat_w) flags = anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h,", "that it will be useful, # but WITHOUT ANY WARRANTY;", "\"\"\"Forward compute.\"\"\" assert reduction_override in (None, 'none', 'mean', 'sum') reduction", "reduction: reduce function :param avg_factor: avg factor :return: loss \"\"\"", "reduction self.loss_weight = loss_weight def forward(self, pred, target, weight=None, avg_factor=None,", "\"\"\"Import all torch operators.\"\"\" import torch.nn.functional as F import torch.nn", "vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from vega.core.common.config import", "!= 'none': raise ValueError('avg_factor can not be used with reduction=\"sum\"')", "= self.anchor_strides[i] feat_h, feat_w = featmap_sizes[i] h, w, _ =", "mask_cross_entropy else: self.loss_function = cross_entropy def forward(self, cls_score, label, weight,", "entropy losses. :param pred: predict result :param label: gt label", "def _expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand binary labels. 
:param labels: labels", "self).__init__() def forward(self, x): \"\"\"Get cls score and bbox preds.\"\"\"", "self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get anchors", "reduction_override else self.reduction) if target.numel() > 0: loss_bbox = self.loss_weight", "== target.size() and target.numel() > 0 diff = torch.abs(pred -", "target :param weight: weight :param avg_factor: avg factor :param reduction_override:", "RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\" def __init__(self, out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls", "def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Binary cross entropy loss.", "weights :param label_channels: label channels :return: binary label and label", "zip from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\"", "reduction = ( reduction_override if reduction_override else self.reduction) loss_cls =", ":param labels: labels :param label_weights: label weights :param label_channels: label", "self.loss_weight * smooth_l1_loss( pred, target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs)", "Anchors(nn.Module): \"\"\"Get anchors according to feature map sizes.\"\"\" def __init__(self,", ">= 1).squeeze() if inds.numel() > 0: bin_labels[inds, labels[inds] - 1]", "be useful, # but WITHOUT ANY WARRANTY; without even the", ":param desc: config dict \"\"\" super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid = use_sigmoid", "/ anchor_stride)), feat_h) valid_feat_w = min(int(np.ceil(w / anchor_stride)), feat_w) flags", "= cross_entropy def forward(self, cls_score, label, weight, avg_factor, reduction_override=None, **kwargs):", "Loss.\"\"\" def 
__init__(self, out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox", "avg_factor=None): \"\"\"Binary cross entropy loss. :param pred: predict result :param", "weight=None, avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward compute. :param pred: predict :param", "reduction='mean', loss_weight=1.0): \"\"\"Init smooth l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta =", "1.0) self.label_channels = num_classes if use_sigmoid_cls else 1 self.cfg =", "= x featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] anchor_generators", "beta, 0.5 * diff * diff / beta, diff -", "x): \"\"\"Get x.\"\"\" (cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_pos,", "h, w, _ = img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h / anchor_stride)),", "reduce function :param avg_factor: avg factor :return: loss \"\"\" if", "loss = loss * weight if avg_factor is None: loss", "partial(func, **kwargs) if kwargs else func map_results = map(pfunc, *args)", "- target) loss = torch.where(diff < beta, 0.5 * diff", "= self.loss_weight * self.loss_function(cls_score, label, weight, reduction=reduction, avg_factor=avg_factor, **kwargs) return", "reduce funtion :return: loss \"\"\" reduction_function = F._Reduction.get_enum(reduction) if reduction_function", "reduction != 'none': raise ValueError('avg_factor can not be used with", "x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda() img_metas = [x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda()", "anchor_base_sizes_cfg self.anchor_scales = anchor_scales or [8, 16, 32] self.anchor_ratios =", "False}, 'allowed_border': 0, 'pos_weight': -1, 'debug': False}) self.sampling = sampling", "import Config from functools import partial import numpy as np", "assert reduction_override in (None, 'none', 'mean', 'sum') reduction = (", "cls score.\"\"\" cls_scores = x[2][0] bbox_preds = x[2][1] gt_bboxes =", "\"\"\" 
super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid = use_sigmoid self.use_mask = use_mask self.reduction", "in range(num_levels): anchor_stride = self.anchor_strides[i] feat_h, feat_w = featmap_sizes[i] h,", "= min(int(np.ceil(h / anchor_stride)), feat_h) valid_feat_w = min(int(np.ceil(w / anchor_stride)),", "is not None: loss = loss * weight if avg_factor", "label_weights = label_weights.reshape(-1) cls_score = cls_score.permute(0, 2, 3, 1).reshape(-1, self.out_channels)", "or FITNESS FOR A PARTICULAR PURPOSE. See the # MIT", "CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\" def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init", "reduction_override if reduction_override else self.reduction) if target.numel() > 0: loss_bbox", "@NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\" def __init__(self): super(RpnLossInput, self).__init__()", "loss. :param pred: predict :param target: target :param beta: beta", "'add_gt_as_proposals': False}, 'allowed_border': 0, 'pos_weight': -1, 'debug': False}) self.sampling =", "desc: config dict \"\"\" super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid = use_sigmoid self.use_mask", "1.0, 2.0] self.anchor_strides = anchor_strides or [4, 8, 16, 32,", "label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get anchors according to feature", "range(num_levels): anchor_stride = self.anchor_strides[i] feat_h, feat_w = featmap_sizes[i] h, w,", "loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta = beta self.reduction = reduction self.loss_weight", "func: function :param args: args of function :return: result \"\"\"", "0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5}, 'sampler': {'name': 'RandomSampler', 'num': 256,", "loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy 
Loss.\"\"\" def __init__(self, use_sigmoid=False,", "in enumerate(img_metas): multi_level_flags = [] for i in range(num_levels): anchor_stride", "multi_level_flags = [] for i in range(num_levels): anchor_stride = self.anchor_strides[i]", "AnchorGenerator from vega.core.common.config import Config from functools import partial import", "labels, label_weights, avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1, 4) bbox_weights = bbox_weights.reshape(-1,", "32] self.anchor_ratios = anchor_ratios or [0.5, 1.0, 2.0] self.anchor_strides =", "valid_flag_list, multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore def multi_apply(func, *args, **kwargs): \"\"\"Multi", "label_channels: label channels :return: binary label and label weights \"\"\"", "loss def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Cross entropy losses.", "loss elif reduction_function == 1: return loss.mean() elif reduction_function ==", "1] = 1 if label_weights is None: bin_label_weights = None", "is free software; you can redistribute it and/or modify #", "def mask_cross_entropy(pred, target, label, reduction='mean', avg_factor=None): \"\"\"Mask cross entropy loss.", "super(RpnClsLossInput, self).__init__() def forward(self, x): \"\"\"Get cls score and bbox", "elif self.use_mask: self.loss_function = mask_cross_entropy else: self.loss_function = cross_entropy def", "losses :param weight: weight :param reduction: reduce function :param avg_factor:", "= _expand_binary_labels(label, weight, pred.size(-1)) if weight is not None: weight", "bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) loss_bbox = self.loss_bbox(bbox_pred,", "label, reduction='mean', avg_factor=None): \"\"\"Mask cross entropy loss. 
:param pred: predict", "weights \"\"\" bin_labels = labels.new_full((labels.size(0), label_channels), 0) inds = torch.nonzero(labels", "self.anchor_scales, self.anchor_ratios)) num_imgs = len(img_metas) num_levels = len(featmap_sizes) multi_level_anchors =", "cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Cross entropy losses. :param pred:", ".0, .0, .0) self.target_stds = target_stds or (1.0, 1.0, 1.0,", "( reduction_override if reduction_override else self.reduction) loss_cls = self.loss_weight *", "funtion :return: loss \"\"\" reduction_function = F._Reduction.get_enum(reduction) if reduction_function ==", "torch.nn as nn import torch from vega.search_space.networks.network_factory import NetworkFactory from", "binary label and label weights \"\"\" bin_labels = labels.new_full((labels.size(0), label_channels),", "use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init Cross Entropy loss. :param desc:", "coding: utf-8 -*- # Copyright (C) 2020. 
Huawei Technologies Co.,", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "label, reduction='none') if weight is not None: weight = weight.float()", "torch.nn.functional as F import torch.nn as nn import torch from", "bin_label_weights def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Binary cross entropy", "target.size() and target.numel() > 0 diff = torch.abs(pred - target)", "*args) return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\"", "This program is distributed in the hope that it will", "4) loss_bbox = self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples) return loss_cls, loss_bbox", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "weight, avg_factor, reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\" assert reduction_override in (None,", "avg_factor=None): \"\"\"Cross entropy losses. :param pred: predict result :param label:", "\"\"\"Forward compute. :param pred: predict :param target: target :param weight:", "cls score and bbox preds.\"\"\" cls_scores = x[0] bbox_preds =", "avg_factor is None num_rois = pred.size()[0] inds = torch.arange(0, num_rois,", "under the terms of the MIT License. # This program", "anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs = len(img_metas) num_levels", "loss. 
:param loss: losses :param weight: weight :param reduction: reduce", "is None: bin_label_weights = None else: bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0),", "factor :return: loss \"\"\" if pred.dim() != label.dim(): label, weight", "reduce function :param avg_factor: avg factor :return: loss \"\"\" assert", "(cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_pos, num_total_neg, num_total_samples) =", "bbox_targets, bbox_weights, num_total_pos, num_total_neg, num_total_samples) = x losses_cls, losses_bbox =", "def __init__(self): super(RpnLossInput, self).__init__() def forward(self, x): \"\"\"Get cls score.\"\"\"", "= CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss() self.out_channels = out_channels def forward(self,", "else: self.loss_function = cross_entropy def forward(self, cls_score, label, weight, avg_factor,", "None: weight = weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor)", "to feature map sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None):", "label, weight=None, reduction='mean', avg_factor=None): \"\"\"Cross entropy losses. 
:param pred: predict", "it and/or modify # it under the terms of the", "label_weights is None: bin_label_weights = None else: bin_label_weights = label_weights.view(-1,", "beta: beta :return: loss \"\"\" assert beta > 0 assert", "vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from vega.core.common.config import Config from functools import", "= use_mask self.reduction = reduction self.loss_weight = loss_weight if self.use_sigmoid:", "ValueError('avg_factor can not be used with reduction=\"sum\"') return loss def", "__init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales =", "= featmap_sizes[i] h, w, _ = img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h", "i in range(num_levels): anchor_stride = self.anchor_strides[i] feat_h, feat_w = featmap_sizes[i]", "it will be useful, # but WITHOUT ANY WARRANTY; without", "cls_scores] anchor_generators = [] for anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales,", "reduction = ( reduction_override if reduction_override else self.reduction) if target.numel()", "if pred.dim() != label.dim(): label, weight = _expand_binary_labels(label, weight, pred.size(-1))", "label_channels) return bin_labels, bin_label_weights def binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None):", "= loss.sum() / avg_factor elif reduction != 'none': raise ValueError('avg_factor", "label: gt label :param weight: weight :param reduction: reduce function", "feat_w) flags = anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return", "@NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get anchors according to feature map sizes.\"\"\"", "bbox_weights.reshape(-1, 4) bbox_pred = 
bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) loss_bbox", ":param label_weights: label weights :param label_channels: label channels :return: binary", "F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): \"\"\"Weight reduce", "terms of the MIT License. # This program is distributed", "if avg_factor is None: loss = reduce_loss(loss, reduction) else: if", "free software; you can redistribute it and/or modify # it", "= loss * weight if avg_factor is None: loss =", "gt_bboxes, img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def __init__(self,", "= anchor_scales or [8, 16, 32] self.anchor_ratios = anchor_ratios or", "return anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore def multi_apply(func, *args,", "gt_bboxes_ignore = x # out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return", "[] for i in range(num_levels): anchor_stride = self.anchor_strides[i] feat_h, feat_w", "avg_factor=avg_factor, **kwargs) return loss_bbox else: return torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred,", "/ beta, diff - 0.5 * beta) return loss def", "return loss elif reduction_function == 1: return loss.mean() elif reduction_function", "FOR A PARTICULAR PURPOSE. 
See the # MIT License for", "operators.\"\"\" import torch.nn.functional as F import torch.nn as nn import", "1).reshape(-1, self.out_channels) loss_cls = self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples) bbox_targets =", "Class Loss.\"\"\" def __init__(self, out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls = CustomCrossEntropyLoss()", "bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\" def __init__(self): super(RpnLossInput,", "avg_factor: avg factor :param reduction_override: reduce override :return: loss \"\"\"", "hope that it will be useful, # but WITHOUT ANY", "weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss def _expand_binary_labels(labels, label_weights, label_channels):", "binary labels. :param labels: labels :param label_weights: label weights :param", "label_weights, bbox_targets, bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\" labels = labels.reshape(-1) label_weights", "1).squeeze() if inds.numel() > 0: bin_labels[inds, labels[inds] - 1] =", "= pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def weight_reduce_loss(loss, weight=None,", "Co., Ltd. All rights reserved. 
# This program is free", "map_results = map(pfunc, *args) return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module):", "program is distributed in the hope that it will be", "super(RpnClsLoss, self).__init__() self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss() self.out_channels =", "if reduction_function == 0: return loss elif reduction_function == 1:", ":param avg_factor: avg factor :return: loss \"\"\" loss = F.cross_entropy(pred,", "img_meta in enumerate(img_metas): multi_level_flags = [] for i in range(num_levels):", "target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means = target_means or", "4) bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) loss_bbox =", "super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid = use_sigmoid self.use_mask = use_mask self.reduction =", "details. \"\"\"Import all torch operators.\"\"\" import torch.nn.functional as F import", "self).__init__() def forward(self, x): \"\"\"Create anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes, img_metas,", "losses. 
:param pred: predict result :param label: gt label :param", "0: bin_labels[inds, labels[inds] - 1] = 1 if label_weights is", "bin_labels[inds, labels[inds] - 1] = 1 if label_weights is None:", "reduction) else: if reduction == 'mean': loss = loss.sum() /", "= 1 if label_weights is None: bin_label_weights = None else:", "from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from vega.core.common.config", "= sampling super(AnchorTargetOp, self).__init__() def forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list,", "**kwargs) return loss_bbox else: return torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred, target,", "* self.loss_function(cls_score, label, weight, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator)", "class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def __init__(self, target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False,", "FITNESS FOR A PARTICULAR PURPOSE. 
See the # MIT License", "self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get", "diff - 0.5 * beta) return loss def cross_entropy(pred, label,", "anchor_strides or [4, 8, 16, 32, 64] self.anchor_base_sizes = list(", "cross_entropy def forward(self, cls_score, label, weight, avg_factor, reduction_override=None, **kwargs): \"\"\"Forward", "bbox preds.\"\"\" cls_scores = x[0] bbox_preds = x[1] return cls_scores,", "'pos_fraction': 0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals': False}, 'allowed_border': 0, 'pos_weight': -1,", "assert pred.size() == target.size() and target.numel() > 0 diff =", "channels :return: binary label and label weights \"\"\" bin_labels =", "predict result :param target: target :param label: gt label :param", "= [] for anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs", "torch.where(diff < beta, 0.5 * diff * diff / beta,", ".0, .0) self.target_stds = target_stds or (1.0, 1.0, 1.0, 1.0)", "PURPOSE. See the # MIT License for more details. \"\"\"Import", ":param pred: predict :param target: target :param weight: weight :param", "compute. :param pred: predict :param target: target :param weight: weight", "Config from functools import partial import numpy as np from", "target, weight=None, avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward compute. 
:param pred: predict", "def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales", "will be useful, # but WITHOUT ANY WARRANTY; without even", "losses_bbox def loss(self, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples):", "labels: labels :param label_weights: label weights :param label_channels: label channels", "self).__init__() def forward(self, x): \"\"\"Get cls score.\"\"\" cls_scores = x[2][0]", "pred: predict result :param label: gt label :param weight: weight", "self).__init__() self.use_sigmoid = use_sigmoid self.use_mask = use_mask self.reduction = reduction", "if label_weights is None: bin_label_weights = None else: bin_label_weights =", "else: bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return bin_labels, bin_label_weights def", "'debug': False}) self.sampling = sampling super(AnchorTargetOp, self).__init__() def forward(self, x):", "not None: weight = weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction,", "x[2][0] bbox_preds = x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda() img_metas = [x[0]['img_meta']]", "target :param beta: beta :return: loss \"\"\" assert beta >", "= anchor_base_sizes_cfg self.anchor_scales = anchor_scales or [8, 16, 32] self.anchor_ratios", "in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs = len(img_metas) num_levels =", "avg_factor=num_total_samples) return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy Loss.\"\"\"", "pred.size(-1)) if weight is not None: weight = weight.float() loss", ":param pred: predict result :param target: target :param label: gt", "MIT License for more details. 
\"\"\"Import all torch operators.\"\"\" import", "All rights reserved. # This program is free software; you", "reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\" assert reduction_override in (None, 'none', 'mean',", "self.loss_bbox = CustomSmoothL1Loss() self.out_channels = out_channels def forward(self, x): \"\"\"Get", "pred, target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox else:", "args: args of function :return: result \"\"\" pfunc = partial(func,", "loss \"\"\" reduction_function = F._Reduction.get_enum(reduction) if reduction_function == 0: return", "sampling=True): self.target_means = target_means or (.0, .0, .0, .0) self.target_stds", "2.0] self.anchor_strides = anchor_strides or [4, 8, 16, 32, 64]", "dtype=torch.long, device=pred.device) pred_slice = pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None]", "2020. Huawei Technologies Co., Ltd. All rights reserved. 
# This", "self).__init__() def forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors, gt_bboxes,", "losses :param reduction: reduce funtion :return: loss \"\"\" reduction_function =", "'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5}, 'sampler': {'name': 'RandomSampler', 'num': 256, 'pos_fraction':", "\"\"\" bin_labels = labels.new_full((labels.size(0), label_channels), 0) inds = torch.nonzero(labels >=", "* diff * diff / beta, diff - 0.5 *", "\"\"\" if weight is not None: loss = loss *", "can not be used with reduction=\"sum\"') return loss def reduce_loss(loss,", "as nn import torch from vega.search_space.networks.network_factory import NetworkFactory from vega.search_space.networks.net_utils", "function :param avg_factor: avg factor :return: loss \"\"\" assert reduction", "weight, pred.size(-1)) if weight is not None: weight = weight.float()", "self.anchor_strides[i] feat_h, feat_w = featmap_sizes[i] h, w, _ = img_meta['pad_shape']", "= beta self.reduction = reduction self.loss_weight = loss_weight def forward(self,", "function :param avg_factor: avg factor :return: loss \"\"\" loss =", "num_total_pos,num_total_neg). 
return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas, self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore,", "use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means = target_means or (.0, .0, .0,", "<reponame>Huawei-Ascend/modelzoo<filename>built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/search_space/networks/pytorch/operator/rpn.py # -*- coding: utf-8 -*- # Copyright (C) 2020.", "self.loss_function = mask_cross_entropy else: self.loss_function = cross_entropy def forward(self, cls_score,", "elif reduction != 'none': raise ValueError('avg_factor can not be used", "smooth_l1_loss( pred, target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox", "diff = torch.abs(pred - target) loss = torch.where(diff < beta,", "bin_labels = labels.new_full((labels.size(0), label_channels), 0) inds = torch.nonzero(labels >= 1).squeeze()", "or [0.5, 1.0, 2.0] self.anchor_strides = anchor_strides or [4, 8,", "score and bbox preds.\"\"\" cls_scores = x[0] bbox_preds = x[1]", "vega.search_space.networks.network_factory import NetworkFactory from vega.search_space.networks.net_utils import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import", "import AnchorGenerator from vega.core.common.config import Config from functools import partial", "anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list, multi_level_anchors,", "img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def __init__(self, target_means=None,", "from vega.core.common.config import Config from functools import partial import numpy", "/ anchor_stride)), feat_w) flags = anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, 
valid_feat_w)) multi_level_flags.append(flags)", "program is free software; you can redistribute it and/or modify", "loss(self, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\"", "* smooth_l1_loss( pred, target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return", "= pred.size()[0] inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device) pred_slice =", "bbox_targets, bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\" labels = labels.reshape(-1) label_weights =", "# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights", "\"\"\" if pred.dim() != label.dim(): label, weight = _expand_binary_labels(label, weight,", "cls_scores = x[2][0] bbox_preds = x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda() img_metas", "16, 32] self.anchor_ratios = anchor_ratios or [0.5, 1.0, 2.0] self.anchor_strides", "according to feature map sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None,", "img_metas, gt_bboxes_ignore = x featmap_sizes = [featmap.size()[-2:] for featmap in", "F import torch.nn as nn import torch from vega.search_space.networks.network_factory import", "pred_slice = pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def weight_reduce_loss(loss,", "> 0: loss_bbox = self.loss_weight * smooth_l1_loss( pred, target, weight,", "labels. :param labels: labels :param label_weights: label weights :param label_channels:", "redistribute it and/or modify # it under the terms of", "{'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr':", "target: target :param beta: beta :return: loss \"\"\" assert beta", "reduction='mean', avg_factor=None): \"\"\"Weight reduce loss. 
:param loss: losses :param weight:", "label :param reduction: reduce function :param avg_factor: avg factor :return:", "anchor_stride = self.anchor_strides[i] feat_h, feat_w = featmap_sizes[i] h, w, _", "num_total_samples) = x losses_cls, losses_bbox = multi_apply(self.loss, cls_score, bbox_pred, labels,", "PARTICULAR PURPOSE. See the # MIT License for more details.", "@weighted_loss def smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth l1 loss. :param pred:", "self.reduction) loss_cls = self.loss_weight * self.loss_function(cls_score, label, weight, reduction=reduction, avg_factor=avg_factor,", "reduction_function == 0: return loss elif reduction_function == 1: return", "reduction self.loss_weight = loss_weight if self.use_sigmoid: self.loss_function = binary_cross_entropy elif", "from six.moves import map, zip from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator)", "\"\"\"Get cls score and bbox preds.\"\"\" cls_scores = x[0] bbox_preds", "self.label_channels = num_classes if use_sigmoid_cls else 1 self.cfg = Config({'assigner':", "self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs = len(img_metas) num_levels = len(featmap_sizes)", "beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init smooth l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__() self.beta", "self.loss_weight = loss_weight def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None,", "loss_cls = self.loss_weight * self.loss_function(cls_score, label, weight, reduction=reduction, avg_factor=avg_factor, **kwargs)", "bin_label_weights = label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return bin_labels, bin_label_weights def binary_cross_entropy(pred,", "= reduce_loss(loss, reduction) else: if reduction == 'mean': loss =", "loss: losses :param weight: weight :param reduction: reduce function 
:param", "x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module):", "num_total_samples=num_total_samples) return losses_cls, losses_bbox def loss(self, cls_score, bbox_pred, labels, label_weights,", "forward(self, x): \"\"\"Create anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore =", "loss = F.binary_cross_entropy_with_logits( pred, label.float(), weight, reduction='none') loss = weight_reduce_loss(loss,", "CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss() self.out_channels = out_channels def forward(self, x):", "losses_cls, losses_bbox def loss(self, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights,", "num_total_neg, num_total_samples) = x losses_cls, losses_bbox = multi_apply(self.loss, cls_score, bbox_pred,", "num_rois, dtype=torch.long, device=pred.device) pred_slice = pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target,", "label, weight, avg_factor, reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\" assert reduction_override in", "bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore = x featmap_sizes = [featmap.size()[-2:] for", "Technologies Co., Ltd. All rights reserved. # This program is", "self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales = anchor_scales or [8, 16, 32]", "factor :return: loss \"\"\" assert reduction == 'mean' and avg_factor", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "loss. 
:param pred: predict result :param label: gt label :param", "= torch.arange(0, num_rois, dtype=torch.long, device=pred.device) pred_slice = pred[inds, label].squeeze(1) return", "bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_pos, num_total_neg, num_total_samples) = x", "0: loss_bbox = self.loss_weight * smooth_l1_loss( pred, target, weight, beta=self.beta,", "in (None, 'none', 'mean', 'sum') reduction = ( reduction_override if", "avg_factor: avg factor :return: loss \"\"\" loss = F.cross_entropy(pred, label,", "[multi_level_anchors for _ in range(num_imgs)] valid_flag_list = [] for img_id,", "gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def __init__(self, target_means=None, target_stds=None,", "-*- # Copyright (C) 2020. Huawei Technologies Co., Ltd. All", "all torch operators.\"\"\" import torch.nn.functional as F import torch.nn as", "in cls_scores] anchor_generators = [] for anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base,", "i in range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list =", "multi_apply(self.loss, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples) return losses_cls,", "bbox_weights = bbox_weights.reshape(-1, 4) bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1,", "label, weight, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module):", "kwargs else func map_results = map(pfunc, *args) return tuple(map(list, zip(*map_results)))", "loss input.\"\"\" def __init__(self): super(RpnLossInput, self).__init__() def forward(self, x): \"\"\"Get", "self.use_sigmoid: self.loss_function = binary_cross_entropy elif self.use_mask: self.loss_function = mask_cross_entropy 
else:", "the hope that it will be useful, # but WITHOUT", "reduction_override in (None, 'none', 'mean', 'sum') reduction = ( reduction_override", "\"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore = x", "label_weights: label weights :param label_channels: label channels :return: binary label", "min(int(np.ceil(h / anchor_stride)), feat_h) valid_feat_w = min(int(np.ceil(w / anchor_stride)), feat_w)", "label_weights, avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1, 4) bbox_weights = bbox_weights.reshape(-1, 4)", "label_weights, bbox_targets, bbox_weights, num_total_pos, num_total_neg, num_total_samples) = x losses_cls, losses_bbox", "x.\"\"\" (cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_pos, num_total_neg, num_total_samples)", "use_sigmoid_cls else 1 self.cfg = Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7,", "loss_bbox else: return torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth", "result :param label: gt label :param weight: weight :param reduction:", "def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init smooth l1 loss.\"\"\" super(CustomSmoothL1Loss,", "None: loss = loss * weight if avg_factor is None:", "out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss() self.out_channels", "RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\" def __init__(self): super(RpnLossInput, self).__init__() def forward(self,", "gt_bboxes, img_metas, gt_bboxes_ignore = x # out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list,", "self.reduction) if target.numel() > 0: loss_bbox = self.loss_weight * smooth_l1_loss(", ":param beta: beta :return: loss \"\"\" assert beta > 0", "0.5 * diff * diff / beta, diff - 0.5", 
"_expand_binary_labels(label, weight, pred.size(-1)) if weight is not None: weight =", "img_metas, gt_bboxes_ignore def multi_apply(func, *args, **kwargs): \"\"\"Multi apply. :param func:", "== 0: return loss elif reduction_function == 1: return loss.mean()", "class RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def __init__(self): super(RpnClsLossInput, self).__init__() def forward(self,", "valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore def multi_apply(func,", "loss def mask_cross_entropy(pred, target, label, reduction='mean', avg_factor=None): \"\"\"Mask cross entropy", "@NetworkFactory.register(NetTypes.Operator) class AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def __init__(self, target_means=None, target_stds=None, num_classes=2,", "elif reduction_function == 1: return loss.mean() elif reduction_function == 2:", "See the # MIT License for more details. \"\"\"Import all", "the terms of the MIT License. # This program is", "class Anchors(nn.Module): \"\"\"Get anchors according to feature map sizes.\"\"\" def", "\"\"\"Init Cross Entropy loss. :param desc: config dict \"\"\" super(CustomCrossEntropyLoss,", "Cross Entropy loss. :param desc: config dict \"\"\" super(CustomCrossEntropyLoss, self).__init__()", "= weight.float() loss = F.binary_cross_entropy_with_logits( pred, label.float(), weight, reduction='none') loss", ":param loss: losses :param reduction: reduce funtion :return: loss \"\"\"", "vega.core.common.config import Config from functools import partial import numpy as", "def smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth l1 loss. 
:param pred: predict", "cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore = x featmap_sizes = [featmap.size()[-2:]", "if reduction_override else self.reduction) loss_cls = self.loss_weight * self.loss_function(cls_score, label,", "RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def __init__(self): super(RpnClsLossInput, self).__init__() def forward(self, x):", "vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def __init__(self):", "valid_feat_w = min(int(np.ceil(w / anchor_stride)), feat_w) flags = anchor_generators[i].valid_flags((feat_h, feat_w),", "anchor_stride)), feat_w) flags = anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags)", "= self.loss_weight * smooth_l1_loss( pred, target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor,", "flags = anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list,", "img_metas, self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class", "cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\" def __init__(self):", "original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore = x # out=(labels_list, label_weights_list, bbox_targets_list,", "anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales = anchor_scales or [8,", "loss = loss.sum() / avg_factor elif reduction != 'none': raise", "loss \"\"\" if weight is not None: loss = loss", "**kwargs): \"\"\"Forward compute. 
:param pred: predict :param target: target :param", "(valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes, img_metas,", "def reduce_loss(loss, reduction): \"\"\"Reduce loss compute. :param loss: losses :param", "reduction='mean', avg_factor=None): \"\"\"Binary cross entropy loss. :param pred: predict result", "def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): \"\"\"Weight reduce loss. :param loss:", "weight = _expand_binary_labels(label, weight, pred.size(-1)) if weight is not None:", "= F._Reduction.get_enum(reduction) if reduction_function == 0: return loss elif reduction_function", "def forward(self, cls_score, label, weight, avg_factor, reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\"", "list( self.anchor_strides) if self.anchor_base_sizes_cfg is None else self.anchor_base_sizes_cfg super(Anchors, self).__init__()", "anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs = len(img_metas) num_levels = len(featmap_sizes) multi_level_anchors", "__init__(self): super(RpnClsLossInput, self).__init__() def forward(self, x): \"\"\"Get cls score and", "sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get anchors according to feature map", "feature map sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg", "forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward compute. 
:param", "__init__(self, out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss()", "num_classes if use_sigmoid_cls else 1 self.cfg = Config({'assigner': {'name': 'MaxIoUAllNegAssigner',", "reduce override :return: loss \"\"\" reduction = ( reduction_override if", "= ( reduction_override if reduction_override else self.reduction) if target.numel() >", "if inds.numel() > 0: bin_labels[inds, labels[inds] - 1] = 1", "= multi_apply(self.loss, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples) return", "reduce_loss(loss, reduction) else: if reduction == 'mean': loss = loss.sum()", "cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples) return losses_cls, losses_bbox", ":param label_channels: label channels :return: binary label and label weights", "weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss def", "distributed in the hope that it will be useful, #", "input.\"\"\" def __init__(self): super(RpnClsLossInput, self).__init__() def forward(self, x): \"\"\"Get cls", "avg factor :param reduction_override: reduce override :return: loss \"\"\" reduction", "0.3, 'ignore_iof_thr': 0.5}, 'sampler': {'name': 'RandomSampler', 'num': 256, 'pos_fraction': 0.5,", "or (.0, .0, .0, .0) self.target_stds = target_stds or (1.0,", ":param target: target :param beta: beta :return: loss \"\"\" assert", "pred.dim() != label.dim(): label, weight = _expand_binary_labels(label, weight, pred.size(-1)) if", "[] for i in range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors)", "def forward(self, x): \"\"\"Get cls score.\"\"\" cls_scores = x[2][0] bbox_preds", "def forward(self, x): \"\"\"Get x.\"\"\" (cls_score, bbox_pred, labels, label_weights, bbox_targets,", 
"weight=weight, reduction=reduction, avg_factor=avg_factor) return loss def _expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand", "= x # out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return AnchorTarget(anchor_list,", "'none', 'mean', 'sum') reduction = ( reduction_override if reduction_override else", "pred.size()[0] inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device) pred_slice = pred[inds,", "avg_factor: avg factor :return: loss \"\"\" assert reduction == 'mean'", "out_channels def forward(self, x): \"\"\"Get x.\"\"\" (cls_score, bbox_pred, labels, label_weights,", "loss = reduce_loss(loss, reduction) else: if reduction == 'mean': loss", "loss: losses :param reduction: reduce funtion :return: loss \"\"\" reduction_function", "weight, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth", "# out=(labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes,", "self.loss_function(cls_score, label, weight, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator) class", "label, weight = _expand_binary_labels(label, weight, pred.size(-1)) if weight is not", "x): \"\"\"Get cls score.\"\"\" cls_scores = x[2][0] bbox_preds = x[2][1]", "of the MIT License. 
# This program is distributed in", "'MaxIoUAllNegAssigner', 'pos_iou_thr': 0.7, 'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5},", "= F.cross_entropy(pred, label, reduction='none') if weight is not None: weight", "map sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None, anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg =", "loss \"\"\" loss = F.cross_entropy(pred, label, reduction='none') if weight is", "reduce_loss(loss, reduction): \"\"\"Reduce loss compute. :param loss: losses :param reduction:", "\"\"\"Expand binary labels. :param labels: labels :param label_weights: label weights", "== 1: return loss.mean() elif reduction_function == 2: return loss.sum()", "**kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\" def", "= weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss", ":return: loss \"\"\" reduction_function = F._Reduction.get_enum(reduction) if reduction_function == 0:", "pred.size() == target.size() and target.numel() > 0 diff = torch.abs(pred", "x[1] return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\"", "cls_scores = x[0] bbox_preds = x[1] return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator)", "self.anchor_strides) if self.anchor_base_sizes_cfg is None else self.anchor_base_sizes_cfg super(Anchors, self).__init__() def", "\"\"\"Get cls score.\"\"\" cls_scores = x[2][0] bbox_preds = x[2][1] gt_bboxes", "useful, # but WITHOUT ANY WARRANTY; without even the implied", "import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from vega.core.common.config import Config", "w, _ = img_meta['pad_shape'] valid_feat_h = min(int(np.ceil(h / anchor_stride)), feat_h)", ":return: loss \"\"\" 
if weight is not None: loss =", "reduction=reduction, avg_factor=avg_factor) return loss def mask_cross_entropy(pred, target, label, reduction='mean', avg_factor=None):", "x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore =", "@NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def __init__(self): super(RpnClsLossInput, self).__init__() def", "you can redistribute it and/or modify # it under the", "min(int(np.ceil(w / anchor_stride)), feat_w) flags = anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w))", "label_weights_list, bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas, self.target_means,", "input.\"\"\" def __init__(self): super(RpnLossInput, self).__init__() def forward(self, x): \"\"\"Get cls", "len(img_metas) num_levels = len(featmap_sizes) multi_level_anchors = [] for i in", "avg factor :return: loss \"\"\" if weight is not None:", "'num': 256, 'pos_fraction': 0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals': False}, 'allowed_border': 0,", "gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore @NetworkFactory.register(NetTypes.Operator)", "def loss(self, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples): \"\"\"Get", ":param avg_factor: avg factor :return: loss \"\"\" if pred.dim() !=", "return loss_bbox else: return torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred, target, beta=1.0):", "multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors for _ in range(num_imgs)] valid_flag_list =", ".0) self.target_stds = target_stds or (1.0, 1.0, 1.0, 1.0) self.label_channels", "loss compute. 
:param loss: losses :param reduction: reduce funtion :return:", "multi_level_anchors = [] for i in range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i],", "pred: predict :param target: target :param beta: beta :return: loss", "label :param weight: weight :param reduction: reduce function :param avg_factor:", "inds = torch.nonzero(labels >= 1).squeeze() if inds.numel() > 0: bin_labels[inds,", "target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox else: return", "__init__(self, beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init smooth l1 loss.\"\"\" super(CustomSmoothL1Loss, self).__init__()", "label weights \"\"\" bin_labels = labels.new_full((labels.size(0), label_channels), 0) inds =", "nn import torch from vega.search_space.networks.network_factory import NetworkFactory from vega.search_space.networks.net_utils import", "256, 'pos_fraction': 0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals': False}, 'allowed_border': 0, 'pos_weight':", "= [] for img_id, img_meta in enumerate(img_metas): multi_level_flags = []", "is None else self.anchor_base_sizes_cfg super(Anchors, self).__init__() def forward(self, x): \"\"\"Create", ":param loss: losses :param weight: weight :param reduction: reduce function", "_expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand binary labels. :param labels: labels :param", "self.loss_function = cross_entropy def forward(self, cls_score, label, weight, avg_factor, reduction_override=None,", "weight=None, reduction='mean', avg_factor=None): \"\"\"Weight reduce loss. 
:param loss: losses :param", "= anchor_strides or [4, 8, 16, 32, 64] self.anchor_base_sizes =", "self).__init__() self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox = CustomSmoothL1Loss() self.out_channels = out_channels", "is not None: weight = weight.float() loss = weight_reduce_loss(loss, weight=weight,", "\"\"\"Smooth L1 Loss.\"\"\" def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init smooth", "forward(self, x): \"\"\"Get cls score.\"\"\" cls_scores = x[2][0] bbox_preds =", "weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return loss def mask_cross_entropy(pred, target, label, reduction='mean',", ":param weight: weight :param avg_factor: avg factor :param reduction_override: reduce", "\"\"\"Weight reduce loss. :param loss: losses :param weight: weight :param", "F.cross_entropy(pred, label, reduction='none') if weight is not None: weight =", "multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore def multi_apply(func, *args, **kwargs): \"\"\"Multi apply.", "bbox_targets.reshape(-1, 4) bbox_weights = bbox_weights.reshape(-1, 4) bbox_pred = bbox_pred.permute(0, 2,", "__init__(self, use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init Cross Entropy loss. :param", ":param label: gt label :param weight: weight :param reduction: reduce", "= list( self.anchor_strides) if self.anchor_base_sizes_cfg is None else self.anchor_base_sizes_cfg super(Anchors,", "return loss def _expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand binary labels. :param", "AnchorTargetOp(nn.Module): \"\"\"Anchor Target.\"\"\" def __init__(self, target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None,", "for _ in range(num_imgs)] valid_flag_list = [] for img_id, img_meta", "return loss def mask_cross_entropy(pred, target, label, reduction='mean', avg_factor=None): \"\"\"Mask cross", "A PARTICULAR PURPOSE. 
See the # MIT License for more", "= [featmap.size()[-2:] for featmap in cls_scores] anchor_generators = [] for", "six.moves import map, zip from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator) class", "utf-8 -*- # Copyright (C) 2020. Huawei Technologies Co., Ltd.", "assert reduction == 'mean' and avg_factor is None num_rois =", "preds.\"\"\" cls_scores = x[0] bbox_preds = x[1] return cls_scores, bbox_preds", "\"\"\" reduction_function = F._Reduction.get_enum(reduction) if reduction_function == 0: return loss", "function :param avg_factor: avg factor :return: loss \"\"\" if pred.dim()", "= ( reduction_override if reduction_override else self.reduction) loss_cls = self.loss_weight", "\"\"\"Smooth l1 loss. :param pred: predict :param target: target :param", "mask_cross_entropy(pred, target, label, reduction='mean', avg_factor=None): \"\"\"Mask cross entropy loss. :param", "gt_bboxes, img_metas, gt_bboxes_ignore = x featmap_sizes = [featmap.size()[-2:] for featmap", "feat_w), (valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes,", "beta, diff - 0.5 * beta) return loss def cross_entropy(pred,", "label_channels): \"\"\"Expand binary labels. :param labels: labels :param label_weights: label", "torch.abs(pred - target) loss = torch.where(diff < beta, 0.5 *", "0.5}, 'sampler': {'name': 'RandomSampler', 'num': 256, 'pos_fraction': 0.5, 'neg_pos_ub': -1,", "\"\"\"Multi apply. 
:param func: function :param args: args of function", "label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice, target, reduction='mean')[None] def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None):", "labels, label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples) return losses_cls, losses_bbox def loss(self,", "import numpy as np from six.moves import map, zip from", "weight = weight.float() loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return", "= out_channels def forward(self, x): \"\"\"Get x.\"\"\" (cls_score, bbox_pred, labels,", "dict \"\"\" super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid = use_sigmoid self.use_mask = use_mask", "\"\"\"Rpn input.\"\"\" def __init__(self): super(RpnClsLossInput, self).__init__() def forward(self, x): \"\"\"Get", "'mean', 'sum') reduction = ( reduction_override if reduction_override else self.reduction)", "l1 loss. :param pred: predict :param target: target :param beta:", "( reduction_override if reduction_override else self.reduction) if target.numel() > 0:", "self.loss_bbox(bbox_pred, bbox_targets, bbox_weights, avg_factor=num_total_samples) return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module):", "\"\"\" reduction = ( reduction_override if reduction_override else self.reduction) if", "forward(self, cls_score, label, weight, avg_factor, reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\" assert", "binary_cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Binary cross entropy loss. 
:param", "is None num_rois = pred.size()[0] inds = torch.arange(0, num_rois, dtype=torch.long,", "super(Anchors, self).__init__() def forward(self, x): \"\"\"Create anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes,", "label: gt label :param reduction: reduce function :param avg_factor: avg", "valid_flag_list = [] for img_id, img_meta in enumerate(img_metas): multi_level_flags =", "target: target :param label: gt label :param reduction: reduce function", "F._Reduction.get_enum(reduction) if reduction_function == 0: return loss elif reduction_function ==", "= len(img_metas) num_levels = len(featmap_sizes) multi_level_anchors = [] for i", "= target_stds or (1.0, 1.0, 1.0, 1.0) self.label_channels = num_classes", "self.sampling = sampling super(AnchorTargetOp, self).__init__() def forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\"", "bbox_targets_list, bbox_weights_list, num_total_pos,num_total_neg). return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas, self.target_means, self.target_stds,", "loss * weight if avg_factor is None: loss = reduce_loss(loss,", "if use_sigmoid_cls else 1 self.cfg = Config({'assigner': {'name': 'MaxIoUAllNegAssigner', 'pos_iou_thr':", "loss = weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss def _expand_binary_labels(labels,", "def multi_apply(func, *args, **kwargs): \"\"\"Multi apply. :param func: function :param", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See", "'neg_iou_thr': tuple([-1, 0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5}, 'sampler': {'name': 'RandomSampler',", "forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore", "vega.search_space.networks.net_utils import NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import", "partial import numpy as np from six.moves import map, zip", "zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn Class Loss.\"\"\" def __init__(self, out_channels=2):", "reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox else: return torch.FloatTensor([0.0]).cuda() @weighted_loss def", "return torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth l1 loss.", "0, 'pos_weight': -1, 'debug': False}) self.sampling = sampling super(AnchorTargetOp, self).__init__()", "return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy Loss.\"\"\" def", "and target.numel() > 0 diff = torch.abs(pred - target) loss", "anchor_list, valid_flag_list, original_anchors, gt_bboxes, img_metas, gt_bboxes_ignore = x # out=(labels_list,", ":param weight: weight :param reduction: reduce function :param avg_factor: avg", "bbox_weights, num_total_pos, num_total_neg, num_total_samples) = x losses_cls, losses_bbox = multi_apply(self.loss,", "anchor_stride)), feat_h) valid_feat_w = min(int(np.ceil(w / anchor_stride)), feat_w) flags =", "else self.reduction) loss_cls = self.loss_weight * self.loss_function(cls_score, label, weight, reduction=reduction,", "from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator) class 
RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def", "AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas, self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels,", "# This program is free software; you can redistribute it", "super(CustomSmoothL1Loss, self).__init__() self.beta = beta self.reduction = reduction self.loss_weight =", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "**kwargs): \"\"\"Multi apply. :param func: function :param args: args of", "of function :return: result \"\"\" pfunc = partial(func, **kwargs) if", "reduction_override=None, **kwargs): \"\"\"Forward compute. :param pred: predict :param target: target", "compute.\"\"\" assert reduction_override in (None, 'none', 'mean', 'sum') reduction =", "software; you can redistribute it and/or modify # it under", "for featmap in cls_scores] anchor_generators = [] for anchor_base in", "use_mask self.reduction = reduction self.loss_weight = loss_weight if self.use_sigmoid: self.loss_function", "= bbox_weights.reshape(-1, 4) bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4)", "= loss_weight def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs):", "and avg_factor is None num_rois = pred.size()[0] inds = torch.arange(0,", "reduction_function = F._Reduction.get_enum(reduction) if reduction_function == 0: return loss elif", "'sampler': {'name': 'RandomSampler', 'num': 256, 'pos_fraction': 0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals':", "smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth l1 loss. 
:param pred: predict :param", "self.anchor_base_sizes_cfg super(Anchors, self).__init__() def forward(self, x): \"\"\"Create anchor.\"\"\" cls_scores, bbox_preds,", ":return: result \"\"\" pfunc = partial(func, **kwargs) if kwargs else", "self.target_means = target_means or (.0, .0, .0, .0) self.target_stds =", "Loss.\"\"\" def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init smooth l1 loss.\"\"\"", "predict result :param label: gt label :param weight: weight :param", "map, zip from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module): \"\"\"Rpn", "import torch.nn.functional as F import torch.nn as nn import torch", "pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward compute. :param pred:", "np from six.moves import map, zip from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss", "anchor_ratios or [0.5, 1.0, 2.0] self.anchor_strides = anchor_strides or [4,", "\"\"\"Anchor Target.\"\"\" def __init__(self, target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True):", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the #", "def __init__(self, out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls = CustomCrossEntropyLoss() self.loss_bbox =", "self.beta = beta self.reduction = reduction self.loss_weight = loss_weight def", "forward(self, x): \"\"\"Get x.\"\"\" (cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights,", "reduction_function == 1: return loss.mean() elif reduction_function == 2: return", "loss.sum() / avg_factor elif reduction != 'none': raise ValueError('avg_factor can", "loss_cls = self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1, 4)", "def __init__(self): super(RpnClsLossInput, self).__init__() def forward(self, x): \"\"\"Get cls score", "= anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors for _ in", "feat_w = featmap_sizes[i] h, w, _ = img_meta['pad_shape'] valid_feat_h =", "weight is not None: weight = weight.float() loss = weight_reduce_loss(loss,", "avg factor :return: loss \"\"\" if pred.dim() != label.dim(): label,", "losses_bbox = multi_apply(self.loss, cls_score, bbox_pred, labels, label_weights, bbox_targets, bbox_weights, num_total_samples=num_total_samples)", "target.numel() > 0 diff = torch.abs(pred - target) loss =", "import weighted_loss @NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def __init__(self): super(RpnClsLossInput,", "(.0, .0, .0, .0) self.target_stds = target_stds or (1.0, 1.0,", "= anchor_ratios or [0.5, 1.0, 2.0] self.anchor_strides = anchor_strides or", "bbox_targets = bbox_targets.reshape(-1, 4) bbox_weights = bbox_weights.reshape(-1, 4) bbox_pred =", "Entropy loss. 
:param desc: config dict \"\"\" super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid", "args of function :return: result \"\"\" pfunc = partial(func, **kwargs)", "!= label.dim(): label, weight = _expand_binary_labels(label, weight, pred.size(-1)) if weight", "num_imgs = len(img_metas) num_levels = len(featmap_sizes) multi_level_anchors = [] for", "label weights :param label_channels: label channels :return: binary label and", "map(pfunc, *args) return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn Class", "'none': raise ValueError('avg_factor can not be used with reduction=\"sum\"') return", "functools import partial import numpy as np from six.moves import", "= torch.nonzero(labels >= 1).squeeze() if inds.numel() > 0: bin_labels[inds, labels[inds]", "[4, 8, 16, 32, 64] self.anchor_base_sizes = list( self.anchor_strides) if", "enumerate(img_metas): multi_level_flags = [] for i in range(num_levels): anchor_stride =", "raise ValueError('avg_factor can not be used with reduction=\"sum\"') return loss", "< beta, 0.5 * diff * diff / beta, diff", "score.\"\"\" cls_scores = x[2][0] bbox_preds = x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda()", "num_levels = len(featmap_sizes) multi_level_anchors = [] for i in range(num_levels):", "AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from vega.core.common.config import Config from", "if kwargs else func map_results = map(pfunc, *args) return tuple(map(list,", "[featmap.size()[-2:] for featmap in cls_scores] anchor_generators = [] for anchor_base", "[] for anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs =", "pred: predict result :param target: target :param label: gt label", "loss \"\"\" assert beta > 0 assert pred.size() == target.size()", "= x[2][1] gt_bboxes = x[0]['gt_bboxes'].cuda() 
img_metas = [x[0]['img_meta']] gt_bboxes_ignore =", "return AnchorTarget(anchor_list, valid_flag_list, gt_bboxes, img_metas, self.target_means, self.target_stds, self.cfg, gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None,", "self.out_channels) loss_cls = self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1,", "function :return: result \"\"\" pfunc = partial(func, **kwargs) if kwargs", "reduction=\"sum\"') return loss def reduce_loss(loss, reduction): \"\"\"Reduce loss compute. :param", "self.target_stds = target_stds or (1.0, 1.0, 1.0, 1.0) self.label_channels =", "labels.reshape(-1) label_weights = label_weights.reshape(-1) cls_score = cls_score.permute(0, 2, 3, 1).reshape(-1,", "None: weight = weight.float() loss = F.binary_cross_entropy_with_logits( pred, label.float(), weight,", "return cls_scores, bbox_preds @NetworkFactory.register(NetTypes.Operator) class RpnLossInput(nn.Module): \"\"\"Rpn loss input.\"\"\" def", "\"\"\"Get anchors according to feature map sizes.\"\"\" def __init__(self, anchor_base_sizes_cfg=None,", "self.anchor_scales = anchor_scales or [8, 16, 32] self.anchor_ratios = anchor_ratios", "_ in range(num_imgs)] valid_flag_list = [] for img_id, img_meta in", "import torch.nn as nn import torch from vega.search_space.networks.network_factory import NetworkFactory", "1 if label_weights is None: bin_label_weights = None else: bin_label_weights", "NetTypes from vega.search_space.networks.pytorch.utils.anchor_utils.anchor_target import AnchorTarget from vega.search_space.networks.pytorch.utils.bbox_utils.anchor_generator import AnchorGenerator from", "self.reduction = reduction self.loss_weight = loss_weight def forward(self, pred, target,", "cfg=None, sampling=True): self.target_means = target_means or (.0, .0, .0, .0)", "labels, label_weights, bbox_targets, bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\" labels = labels.reshape(-1)", "* beta) return loss def 
cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None):", "label_weights.view(-1, 1).expand(label_weights.size(0), label_channels) return bin_labels, bin_label_weights def binary_cross_entropy(pred, label, weight=None,", "\"\"\"Rpn Class Loss.\"\"\" def __init__(self, out_channels=2): super(RpnClsLoss, self).__init__() self.loss_cls =", "= self.loss_cls(cls_score, labels, label_weights, avg_factor=num_total_samples) bbox_targets = bbox_targets.reshape(-1, 4) bbox_weights", ":return: loss \"\"\" reduction = ( reduction_override if reduction_override else", ":param label: gt label :param reduction: reduce function :param avg_factor:", "for anchor_base in self.anchor_base_sizes: anchor_generators.append(AnchorGenerator(anchor_base, self.anchor_scales, self.anchor_ratios)) num_imgs = len(img_metas)", ":param reduction: reduce funtion :return: loss \"\"\" reduction_function = F._Reduction.get_enum(reduction)", "= loss_weight if self.use_sigmoid: self.loss_function = binary_cross_entropy elif self.use_mask: self.loss_function", "def forward(self, x): \"\"\"Create anchor.\"\"\" cls_scores, bbox_preds, gt_bboxes, img_metas, gt_bboxes_ignore", "super(AnchorTargetOp, self).__init__() def forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors,", "loss_weight if self.use_sigmoid: self.loss_function = binary_cross_entropy elif self.use_mask: self.loss_function =", "compute. 
:param loss: losses :param reduction: reduce funtion :return: loss", "x): \"\"\"Get cls score and bbox preds.\"\"\" cls_scores = x[0]", "loss = F.cross_entropy(pred, label, reduction='none') if weight is not None:", "tuple([-1, 0.3]), 'min_pos_iou': 0.3, 'ignore_iof_thr': 0.5}, 'sampler': {'name': 'RandomSampler', 'num':", "def forward(self, x): \"\"\"Create X=(anchor_list,valid_flag_list,gt_bboxes,img_metas,).\"\"\" anchor_list, valid_flag_list, original_anchors, gt_bboxes, img_metas,", "not be used with reduction=\"sum\"') return loss def reduce_loss(loss, reduction):", "MIT License. # This program is distributed in the hope", "= map(pfunc, *args) return tuple(map(list, zip(*map_results))) @NetworkFactory.register(NetTypes.Operator) class RpnClsLoss(nn.Module): \"\"\"Rpn", "reduction_override: reduce override :return: loss \"\"\" reduction = ( reduction_override", "weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox else: return torch.FloatTensor([0.0]).cuda()", "bbox_weights, avg_factor=num_total_samples) return loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy", "Target.\"\"\" def __init__(self, target_means=None, target_stds=None, num_classes=2, use_sigmoid_cls=False, cfg=None, sampling=True): self.target_means", "avg_factor, reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\" assert reduction_override in (None, 'none',", "num_total_pos, num_total_neg, num_total_samples) = x losses_cls, losses_bbox = multi_apply(self.loss, cls_score,", "gt label :param reduction: reduce function :param avg_factor: avg factor", "weight is not None: weight = weight.float() loss = F.binary_cross_entropy_with_logits(", "anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors for _", "'neg_pos_ub': -1, 'add_gt_as_proposals': False}, 'allowed_border': 0, 
'pos_weight': -1, 'debug': False})", "def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward compute.", "0.5 * beta) return loss def cross_entropy(pred, label, weight=None, reduction='mean',", "-1, 'add_gt_as_proposals': False}, 'allowed_border': 0, 'pos_weight': -1, 'debug': False}) self.sampling", "in the hope that it will be useful, # but", "entropy loss. :param pred: predict result :param label: gt label", "reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_cls @NetworkFactory.register(NetTypes.Operator) class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1", ":param target: target :param weight: weight :param avg_factor: avg factor", ":return: loss \"\"\" assert reduction == 'mean' and avg_factor is", "target, reduction='mean')[None] def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): \"\"\"Weight reduce loss.", "Huawei Technologies Co., Ltd. All rights reserved. # This program", "class CustomSmoothL1Loss(nn.Module): \"\"\"Smooth L1 Loss.\"\"\" def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0):", "else: return torch.FloatTensor([0.0]).cuda() @weighted_loss def smooth_l1_loss(pred, target, beta=1.0): \"\"\"Smooth l1", "self.loss_weight * self.loss_function(cls_score, label, weight, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_cls", "= mask_cross_entropy else: self.loss_function = cross_entropy def forward(self, cls_score, label,", "{'name': 'RandomSampler', 'num': 256, 'pos_fraction': 0.5, 'neg_pos_ub': -1, 'add_gt_as_proposals': False},", "in range(num_levels): anchors = anchor_generators[i].grid_anchors(featmap_sizes[i], self.anchor_strides[i]) multi_level_anchors.append(anchors) anchor_list = [multi_level_anchors", "reduction == 'mean': loss = loss.sum() / avg_factor elif reduction", "weighted_loss @NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module): \"\"\"Rpn input.\"\"\" def __init__(self): 
super(RpnClsLossInput, self).__init__()", "L1 Loss.\"\"\" def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): \"\"\"Init smooth l1", "loss_weight def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): \"\"\"Forward", "loss_cls, loss_bbox @NetworkFactory.register(NetTypes.Operator) class CustomCrossEntropyLoss(nn.Module): \"\"\"Cross Entropy Loss.\"\"\" def __init__(self,", "valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list, multi_level_anchors, gt_bboxes, img_metas, gt_bboxes_ignore", "anchor_scales=None, anchor_ratios=None, anchor_strides=None): self.anchor_base_sizes_cfg = anchor_base_sizes_cfg self.anchor_scales = anchor_scales or", "reduce function :param avg_factor: avg factor :return: loss \"\"\" loss", "or [8, 16, 32] self.anchor_ratios = anchor_ratios or [0.5, 1.0,", "entropy loss. :param pred: predict result :param target: target :param", ":param avg_factor: avg factor :param reduction_override: reduce override :return: loss", "valid_feat_h = min(int(np.ceil(h / anchor_stride)), feat_h) valid_feat_w = min(int(np.ceil(w /", "== 'mean' and avg_factor is None num_rois = pred.size()[0] inds", "gt_bboxes_ignore_list=gt_bboxes_ignore, gt_labels_list=None, label_channels=self.label_channels, sampling=self.sampling) @NetworkFactory.register(NetTypes.Operator) class Anchors(nn.Module): \"\"\"Get anchors according", "label_channels), 0) inds = torch.nonzero(labels >= 1).squeeze() if inds.numel() >", "not None: weight = weight.float() loss = F.binary_cross_entropy_with_logits( pred, label.float(),", "as F import torch.nn as nn import torch from vega.search_space.networks.network_factory", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "bbox_weights, num_total_samples=num_total_samples) return losses_cls, losses_bbox def loss(self, cls_score, bbox_pred, labels,", "self.loss_weight = loss_weight if self.use_sigmoid: 
self.loss_function = binary_cross_entropy elif self.use_mask:", "Loss.\"\"\" def __init__(self, use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init Cross Entropy", "/ avg_factor elif reduction != 'none': raise ValueError('avg_factor can not", "range(num_imgs)] valid_flag_list = [] for img_id, img_meta in enumerate(img_metas): multi_level_flags", "= weight_reduce_loss(loss, weight=weight, reduction=reduction, avg_factor=avg_factor) return loss def _expand_binary_labels(labels, label_weights,", "# MIT License for more details. \"\"\"Import all torch operators.\"\"\"", "x featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] anchor_generators =", "pred: predict :param target: target :param weight: weight :param avg_factor:", "def cross_entropy(pred, label, weight=None, reduction='mean', avg_factor=None): \"\"\"Cross entropy losses. :param", "label, weight=None, reduction='mean', avg_factor=None): \"\"\"Binary cross entropy loss. :param pred:", "loss \"\"\" assert reduction == 'mean' and avg_factor is None", "cls_score.permute(0, 2, 3, 1).reshape(-1, self.out_channels) loss_cls = self.loss_cls(cls_score, labels, label_weights,", "-1, 'debug': False}) self.sampling = sampling super(AnchorTargetOp, self).__init__() def forward(self,", "img_metas = [x[0]['img_meta']] gt_bboxes_ignore = x[0]['gt_bboxes_ignore'].cuda() return cls_scores, bbox_preds, gt_bboxes,", "loss. :param desc: config dict \"\"\" super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid =", "if self.use_sigmoid: self.loss_function = binary_cross_entropy elif self.use_mask: self.loss_function = mask_cross_entropy", "avg_factor: avg factor :return: loss \"\"\" if pred.dim() != label.dim():", "pred, label.float(), weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor) return", "target, label, reduction='mean', avg_factor=None): \"\"\"Mask cross entropy loss. 
:param pred:", "> 0: bin_labels[inds, labels[inds] - 1] = 1 if label_weights", "target.numel() > 0: loss_bbox = self.loss_weight * smooth_l1_loss( pred, target,", "weight :param reduction: reduce function :param avg_factor: avg factor :return:", "loss. :param pred: predict result :param target: target :param label:", "64] self.anchor_base_sizes = list( self.anchor_strides) if self.anchor_base_sizes_cfg is None else", "target, beta=1.0): \"\"\"Smooth l1 loss. :param pred: predict :param target:", "\"\"\" assert reduction == 'mean' and avg_factor is None num_rois", "is distributed in the hope that it will be useful,", "F.binary_cross_entropy_with_logits( pred, label.float(), weight, reduction='none') loss = weight_reduce_loss(loss, reduction=reduction, avg_factor=avg_factor)", "avg_factor=avg_factor) return loss def _expand_binary_labels(labels, label_weights, label_channels): \"\"\"Expand binary labels.", "avg_factor is None: loss = reduce_loss(loss, reduction) else: if reduction", "8, 16, 32, 64] self.anchor_base_sizes = list( self.anchor_strides) if self.anchor_base_sizes_cfg", "import map, zip from vega.search_space.networks.pytorch.losses.reduce_loss import weighted_loss @NetworkFactory.register(NetTypes.Operator) class RpnClsLossInput(nn.Module):", "for more details. 
\"\"\"Import all torch operators.\"\"\" import torch.nn.functional as", "bbox_weights, num_total_samples): \"\"\"Get loss.\"\"\" labels = labels.reshape(-1) label_weights = label_weights.reshape(-1)", "def __init__(self, use_sigmoid=False, use_mask=False, reduction='mean', loss_weight=1.0): \"\"\"Init Cross Entropy loss.", "as np from six.moves import map, zip from vega.search_space.networks.pytorch.losses.reduce_loss import", "cls_score, label, weight, avg_factor, reduction_override=None, **kwargs): \"\"\"Forward compute.\"\"\" assert reduction_override", "self).__init__() self.beta = beta self.reduction = reduction self.loss_weight = loss_weight", "= anchor_generators[i].valid_flags((feat_h, feat_w), (valid_feat_h, valid_feat_w)) multi_level_flags.append(flags) valid_flag_list.append(multi_level_flags) return anchor_list, valid_flag_list,", "gt label :param weight: weight :param reduction: reduce function :param", "config dict \"\"\" super(CustomCrossEntropyLoss, self).__init__() self.use_sigmoid = use_sigmoid self.use_mask =", "> 0 assert pred.size() == target.size() and target.numel() > 0", "\"\"\"Mask cross entropy loss. :param pred: predict result :param target:", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "multi_apply(func, *args, **kwargs): \"\"\"Multi apply. :param func: function :param args:", "torch.arange(0, num_rois, dtype=torch.long, device=pred.device) pred_slice = pred[inds, label].squeeze(1) return F.binary_cross_entropy_with_logits(pred_slice,", "torch operators.\"\"\" import torch.nn.functional as F import torch.nn as nn", "without even the implied warranty of # MERCHANTABILITY or FITNESS", "apply. :param func: function :param args: args of function :return:" ]